##// END OF EJS Templates
context: add arbitraryfilectx, which can represent files outside the workdir...
Phil Cohen -
r34053:d2fc8842 default
parent child Browse files
Show More
@@ -1,100 +1,81 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 from __future__ import absolute_import
2 from __future__ import absolute_import
3
3
4 import getopt
4 import getopt
5 import sys
5 import sys
6
6
7 import hgdemandimport
7 import hgdemandimport
8 hgdemandimport.enable()
8 hgdemandimport.enable()
9
9
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import (
11 from mercurial import (
12 context,
12 error,
13 error,
13 fancyopts,
14 fancyopts,
14 simplemerge,
15 simplemerge,
15 ui as uimod,
16 ui as uimod,
16 util,
17 util,
17 )
18 )
18
19
19 options = [('L', 'label', [], _('labels to use on conflict markers')),
20 options = [('L', 'label', [], _('labels to use on conflict markers')),
20 ('a', 'text', None, _('treat all files as text')),
21 ('a', 'text', None, _('treat all files as text')),
21 ('p', 'print', None,
22 ('p', 'print', None,
22 _('print results instead of overwriting LOCAL')),
23 _('print results instead of overwriting LOCAL')),
23 ('', 'no-minimal', None, _('no effect (DEPRECATED)')),
24 ('', 'no-minimal', None, _('no effect (DEPRECATED)')),
24 ('h', 'help', None, _('display help and exit')),
25 ('h', 'help', None, _('display help and exit')),
25 ('q', 'quiet', None, _('suppress output'))]
26 ('q', 'quiet', None, _('suppress output'))]
26
27
27 usage = _('''simplemerge [OPTS] LOCAL BASE OTHER
28 usage = _('''simplemerge [OPTS] LOCAL BASE OTHER
28
29
29 Simple three-way file merge utility with a minimal feature set.
30 Simple three-way file merge utility with a minimal feature set.
30
31
31 Apply to LOCAL the changes necessary to go from BASE to OTHER.
32 Apply to LOCAL the changes necessary to go from BASE to OTHER.
32
33
33 By default, LOCAL is overwritten with the results of this operation.
34 By default, LOCAL is overwritten with the results of this operation.
34 ''')
35 ''')
35
36
36 class ParseError(Exception):
37 class ParseError(Exception):
37 """Exception raised on errors in parsing the command line."""
38 """Exception raised on errors in parsing the command line."""
38
39
39 def showhelp():
40 def showhelp():
40 sys.stdout.write(usage)
41 sys.stdout.write(usage)
41 sys.stdout.write('\noptions:\n')
42 sys.stdout.write('\noptions:\n')
42
43
43 out_opts = []
44 out_opts = []
44 for shortopt, longopt, default, desc in options:
45 for shortopt, longopt, default, desc in options:
45 out_opts.append(('%2s%s' % (shortopt and '-%s' % shortopt,
46 out_opts.append(('%2s%s' % (shortopt and '-%s' % shortopt,
46 longopt and ' --%s' % longopt),
47 longopt and ' --%s' % longopt),
47 '%s' % desc))
48 '%s' % desc))
48 opts_len = max([len(opt[0]) for opt in out_opts])
49 opts_len = max([len(opt[0]) for opt in out_opts])
49 for first, second in out_opts:
50 for first, second in out_opts:
50 sys.stdout.write(' %-*s %s\n' % (opts_len, first, second))
51 sys.stdout.write(' %-*s %s\n' % (opts_len, first, second))
51
52
52 class filebackedctx(object):
53 """simplemerge requires context-like objects"""
54 def __init__(self, path):
55 self._path = path
56
57 def decodeddata(self):
58 with open(self._path, "rb") as f:
59 return f.read()
60
61 def flags(self):
62 return ''
63
64 def path(self):
65 return self._path
66
67 def write(self, data, flags):
68 assert not flags
69 with open(self._path, "w") as f:
70 f.write(data)
71
72 try:
53 try:
73 for fp in (sys.stdin, sys.stdout, sys.stderr):
54 for fp in (sys.stdin, sys.stdout, sys.stderr):
74 util.setbinary(fp)
55 util.setbinary(fp)
75
56
76 opts = {}
57 opts = {}
77 try:
58 try:
78 args = fancyopts.fancyopts(sys.argv[1:], options, opts)
59 args = fancyopts.fancyopts(sys.argv[1:], options, opts)
79 except getopt.GetoptError as e:
60 except getopt.GetoptError as e:
80 raise ParseError(e)
61 raise ParseError(e)
81 if opts['help']:
62 if opts['help']:
82 showhelp()
63 showhelp()
83 sys.exit(0)
64 sys.exit(0)
84 if len(args) != 3:
65 if len(args) != 3:
85 raise ParseError(_('wrong number of arguments'))
66 raise ParseError(_('wrong number of arguments'))
86 local, base, other = args
67 local, base, other = args
87 sys.exit(simplemerge.simplemerge(uimod.ui.load(),
68 sys.exit(simplemerge.simplemerge(uimod.ui.load(),
88 filebackedctx(local),
69 context.arbitraryfilectx(local),
89 filebackedctx(base),
70 context.arbitraryfilectx(base),
90 filebackedctx(other),
71 context.arbitraryfilectx(other),
91 **opts))
72 **opts))
92 except ParseError as e:
73 except ParseError as e:
93 sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
74 sys.stdout.write("%s: %s\n" % (sys.argv[0], e))
94 showhelp()
75 showhelp()
95 sys.exit(1)
76 sys.exit(1)
96 except error.Abort as e:
77 except error.Abort as e:
97 sys.stderr.write("abort: %s\n" % e)
78 sys.stderr.write("abort: %s\n" % e)
98 sys.exit(255)
79 sys.exit(255)
99 except KeyboardInterrupt:
80 except KeyboardInterrupt:
100 sys.exit(255)
81 sys.exit(255)
@@ -1,2387 +1,2418 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirnodes,
25 wdirnodes,
26 wdirrev,
26 wdirrev,
27 )
27 )
28 from . import (
28 from . import (
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 mdiff,
33 mdiff,
34 obsolete as obsmod,
34 obsolete as obsmod,
35 patch,
35 patch,
36 pathutil,
36 pathutil,
37 phases,
37 phases,
38 pycompat,
38 pycompat,
39 repoview,
39 repoview,
40 revlog,
40 revlog,
41 scmutil,
41 scmutil,
42 sparse,
42 sparse,
43 subrepo,
43 subrepo,
44 util,
44 util,
45 )
45 )
46
46
47 propertycache = util.propertycache
47 propertycache = util.propertycache
48
48
49 nonascii = re.compile(r'[^\x21-\x7f]').search
49 nonascii = re.compile(r'[^\x21-\x7f]').search
50
50
51 class basectx(object):
51 class basectx(object):
52 """A basectx object represents the common logic for its children:
52 """A basectx object represents the common logic for its children:
53 changectx: read-only context that is already present in the repo,
53 changectx: read-only context that is already present in the repo,
54 workingctx: a context that represents the working directory and can
54 workingctx: a context that represents the working directory and can
55 be committed,
55 be committed,
56 memctx: a context that represents changes in-memory and can also
56 memctx: a context that represents changes in-memory and can also
57 be committed."""
57 be committed."""
58 def __new__(cls, repo, changeid='', *args, **kwargs):
58 def __new__(cls, repo, changeid='', *args, **kwargs):
59 if isinstance(changeid, basectx):
59 if isinstance(changeid, basectx):
60 return changeid
60 return changeid
61
61
62 o = super(basectx, cls).__new__(cls)
62 o = super(basectx, cls).__new__(cls)
63
63
64 o._repo = repo
64 o._repo = repo
65 o._rev = nullrev
65 o._rev = nullrev
66 o._node = nullid
66 o._node = nullid
67
67
68 return o
68 return o
69
69
70 def __bytes__(self):
70 def __bytes__(self):
71 return short(self.node())
71 return short(self.node())
72
72
73 __str__ = encoding.strmethod(__bytes__)
73 __str__ = encoding.strmethod(__bytes__)
74
74
75 def __int__(self):
75 def __int__(self):
76 return self.rev()
76 return self.rev()
77
77
78 def __repr__(self):
78 def __repr__(self):
79 return r"<%s %s>" % (type(self).__name__, str(self))
79 return r"<%s %s>" % (type(self).__name__, str(self))
80
80
81 def __eq__(self, other):
81 def __eq__(self, other):
82 try:
82 try:
83 return type(self) == type(other) and self._rev == other._rev
83 return type(self) == type(other) and self._rev == other._rev
84 except AttributeError:
84 except AttributeError:
85 return False
85 return False
86
86
87 def __ne__(self, other):
87 def __ne__(self, other):
88 return not (self == other)
88 return not (self == other)
89
89
90 def __contains__(self, key):
90 def __contains__(self, key):
91 return key in self._manifest
91 return key in self._manifest
92
92
93 def __getitem__(self, key):
93 def __getitem__(self, key):
94 return self.filectx(key)
94 return self.filectx(key)
95
95
96 def __iter__(self):
96 def __iter__(self):
97 return iter(self._manifest)
97 return iter(self._manifest)
98
98
99 def _buildstatusmanifest(self, status):
99 def _buildstatusmanifest(self, status):
100 """Builds a manifest that includes the given status results, if this is
100 """Builds a manifest that includes the given status results, if this is
101 a working copy context. For non-working copy contexts, it just returns
101 a working copy context. For non-working copy contexts, it just returns
102 the normal manifest."""
102 the normal manifest."""
103 return self.manifest()
103 return self.manifest()
104
104
105 def _matchstatus(self, other, match):
105 def _matchstatus(self, other, match):
106 """This internal method provides a way for child objects to override the
106 """This internal method provides a way for child objects to override the
107 match operator.
107 match operator.
108 """
108 """
109 return match
109 return match
110
110
111 def _buildstatus(self, other, s, match, listignored, listclean,
111 def _buildstatus(self, other, s, match, listignored, listclean,
112 listunknown):
112 listunknown):
113 """build a status with respect to another context"""
113 """build a status with respect to another context"""
114 # Load earliest manifest first for caching reasons. More specifically,
114 # Load earliest manifest first for caching reasons. More specifically,
115 # if you have revisions 1000 and 1001, 1001 is probably stored as a
115 # if you have revisions 1000 and 1001, 1001 is probably stored as a
116 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
116 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
117 # 1000 and cache it so that when you read 1001, we just need to apply a
117 # 1000 and cache it so that when you read 1001, we just need to apply a
118 # delta to what's in the cache. So that's one full reconstruction + one
118 # delta to what's in the cache. So that's one full reconstruction + one
119 # delta application.
119 # delta application.
120 mf2 = None
120 mf2 = None
121 if self.rev() is not None and self.rev() < other.rev():
121 if self.rev() is not None and self.rev() < other.rev():
122 mf2 = self._buildstatusmanifest(s)
122 mf2 = self._buildstatusmanifest(s)
123 mf1 = other._buildstatusmanifest(s)
123 mf1 = other._buildstatusmanifest(s)
124 if mf2 is None:
124 if mf2 is None:
125 mf2 = self._buildstatusmanifest(s)
125 mf2 = self._buildstatusmanifest(s)
126
126
127 modified, added = [], []
127 modified, added = [], []
128 removed = []
128 removed = []
129 clean = []
129 clean = []
130 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
130 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
131 deletedset = set(deleted)
131 deletedset = set(deleted)
132 d = mf1.diff(mf2, match=match, clean=listclean)
132 d = mf1.diff(mf2, match=match, clean=listclean)
133 for fn, value in d.iteritems():
133 for fn, value in d.iteritems():
134 if fn in deletedset:
134 if fn in deletedset:
135 continue
135 continue
136 if value is None:
136 if value is None:
137 clean.append(fn)
137 clean.append(fn)
138 continue
138 continue
139 (node1, flag1), (node2, flag2) = value
139 (node1, flag1), (node2, flag2) = value
140 if node1 is None:
140 if node1 is None:
141 added.append(fn)
141 added.append(fn)
142 elif node2 is None:
142 elif node2 is None:
143 removed.append(fn)
143 removed.append(fn)
144 elif flag1 != flag2:
144 elif flag1 != flag2:
145 modified.append(fn)
145 modified.append(fn)
146 elif node2 not in wdirnodes:
146 elif node2 not in wdirnodes:
147 # When comparing files between two commits, we save time by
147 # When comparing files between two commits, we save time by
148 # not comparing the file contents when the nodeids differ.
148 # not comparing the file contents when the nodeids differ.
149 # Note that this means we incorrectly report a reverted change
149 # Note that this means we incorrectly report a reverted change
150 # to a file as a modification.
150 # to a file as a modification.
151 modified.append(fn)
151 modified.append(fn)
152 elif self[fn].cmp(other[fn]):
152 elif self[fn].cmp(other[fn]):
153 modified.append(fn)
153 modified.append(fn)
154 else:
154 else:
155 clean.append(fn)
155 clean.append(fn)
156
156
157 if removed:
157 if removed:
158 # need to filter files if they are already reported as removed
158 # need to filter files if they are already reported as removed
159 unknown = [fn for fn in unknown if fn not in mf1 and
159 unknown = [fn for fn in unknown if fn not in mf1 and
160 (not match or match(fn))]
160 (not match or match(fn))]
161 ignored = [fn for fn in ignored if fn not in mf1 and
161 ignored = [fn for fn in ignored if fn not in mf1 and
162 (not match or match(fn))]
162 (not match or match(fn))]
163 # if they're deleted, don't report them as removed
163 # if they're deleted, don't report them as removed
164 removed = [fn for fn in removed if fn not in deletedset]
164 removed = [fn for fn in removed if fn not in deletedset]
165
165
166 return scmutil.status(modified, added, removed, deleted, unknown,
166 return scmutil.status(modified, added, removed, deleted, unknown,
167 ignored, clean)
167 ignored, clean)
168
168
169 @propertycache
169 @propertycache
170 def substate(self):
170 def substate(self):
171 return subrepo.state(self, self._repo.ui)
171 return subrepo.state(self, self._repo.ui)
172
172
173 def subrev(self, subpath):
173 def subrev(self, subpath):
174 return self.substate[subpath][1]
174 return self.substate[subpath][1]
175
175
176 def rev(self):
176 def rev(self):
177 return self._rev
177 return self._rev
178 def node(self):
178 def node(self):
179 return self._node
179 return self._node
180 def hex(self):
180 def hex(self):
181 return hex(self.node())
181 return hex(self.node())
182 def manifest(self):
182 def manifest(self):
183 return self._manifest
183 return self._manifest
184 def manifestctx(self):
184 def manifestctx(self):
185 return self._manifestctx
185 return self._manifestctx
186 def repo(self):
186 def repo(self):
187 return self._repo
187 return self._repo
188 def phasestr(self):
188 def phasestr(self):
189 return phases.phasenames[self.phase()]
189 return phases.phasenames[self.phase()]
190 def mutable(self):
190 def mutable(self):
191 return self.phase() > phases.public
191 return self.phase() > phases.public
192
192
193 def getfileset(self, expr):
193 def getfileset(self, expr):
194 return fileset.getfileset(self, expr)
194 return fileset.getfileset(self, expr)
195
195
196 def obsolete(self):
196 def obsolete(self):
197 """True if the changeset is obsolete"""
197 """True if the changeset is obsolete"""
198 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
198 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
199
199
200 def extinct(self):
200 def extinct(self):
201 """True if the changeset is extinct"""
201 """True if the changeset is extinct"""
202 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
202 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
203
203
204 def unstable(self):
204 def unstable(self):
205 msg = ("'context.unstable' is deprecated, "
205 msg = ("'context.unstable' is deprecated, "
206 "use 'context.orphan'")
206 "use 'context.orphan'")
207 self._repo.ui.deprecwarn(msg, '4.4')
207 self._repo.ui.deprecwarn(msg, '4.4')
208 return self.orphan()
208 return self.orphan()
209
209
210 def orphan(self):
210 def orphan(self):
211 """True if the changeset is not obsolete but it's ancestor are"""
211 """True if the changeset is not obsolete but it's ancestor are"""
212 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
212 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
213
213
214 def bumped(self):
214 def bumped(self):
215 msg = ("'context.bumped' is deprecated, "
215 msg = ("'context.bumped' is deprecated, "
216 "use 'context.phasedivergent'")
216 "use 'context.phasedivergent'")
217 self._repo.ui.deprecwarn(msg, '4.4')
217 self._repo.ui.deprecwarn(msg, '4.4')
218 return self.phasedivergent()
218 return self.phasedivergent()
219
219
220 def phasedivergent(self):
220 def phasedivergent(self):
221 """True if the changeset try to be a successor of a public changeset
221 """True if the changeset try to be a successor of a public changeset
222
222
223 Only non-public and non-obsolete changesets may be bumped.
223 Only non-public and non-obsolete changesets may be bumped.
224 """
224 """
225 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
225 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
226
226
227 def divergent(self):
227 def divergent(self):
228 msg = ("'context.divergent' is deprecated, "
228 msg = ("'context.divergent' is deprecated, "
229 "use 'context.contentdivergent'")
229 "use 'context.contentdivergent'")
230 self._repo.ui.deprecwarn(msg, '4.4')
230 self._repo.ui.deprecwarn(msg, '4.4')
231 return self.contentdivergent()
231 return self.contentdivergent()
232
232
233 def contentdivergent(self):
233 def contentdivergent(self):
234 """Is a successors of a changeset with multiple possible successors set
234 """Is a successors of a changeset with multiple possible successors set
235
235
236 Only non-public and non-obsolete changesets may be divergent.
236 Only non-public and non-obsolete changesets may be divergent.
237 """
237 """
238 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
238 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
239
239
240 def troubled(self):
240 def troubled(self):
241 msg = ("'context.troubled' is deprecated, "
241 msg = ("'context.troubled' is deprecated, "
242 "use 'context.isunstable'")
242 "use 'context.isunstable'")
243 self._repo.ui.deprecwarn(msg, '4.4')
243 self._repo.ui.deprecwarn(msg, '4.4')
244 return self.isunstable()
244 return self.isunstable()
245
245
246 def isunstable(self):
246 def isunstable(self):
247 """True if the changeset is either unstable, bumped or divergent"""
247 """True if the changeset is either unstable, bumped or divergent"""
248 return self.orphan() or self.phasedivergent() or self.contentdivergent()
248 return self.orphan() or self.phasedivergent() or self.contentdivergent()
249
249
250 def troubles(self):
250 def troubles(self):
251 """Keep the old version around in order to avoid breaking extensions
251 """Keep the old version around in order to avoid breaking extensions
252 about different return values.
252 about different return values.
253 """
253 """
254 msg = ("'context.troubles' is deprecated, "
254 msg = ("'context.troubles' is deprecated, "
255 "use 'context.instabilities'")
255 "use 'context.instabilities'")
256 self._repo.ui.deprecwarn(msg, '4.4')
256 self._repo.ui.deprecwarn(msg, '4.4')
257
257
258 troubles = []
258 troubles = []
259 if self.orphan():
259 if self.orphan():
260 troubles.append('orphan')
260 troubles.append('orphan')
261 if self.phasedivergent():
261 if self.phasedivergent():
262 troubles.append('bumped')
262 troubles.append('bumped')
263 if self.contentdivergent():
263 if self.contentdivergent():
264 troubles.append('divergent')
264 troubles.append('divergent')
265 return troubles
265 return troubles
266
266
267 def instabilities(self):
267 def instabilities(self):
268 """return the list of instabilities affecting this changeset.
268 """return the list of instabilities affecting this changeset.
269
269
270 Instabilities are returned as strings. possible values are:
270 Instabilities are returned as strings. possible values are:
271 - orphan,
271 - orphan,
272 - phase-divergent,
272 - phase-divergent,
273 - content-divergent.
273 - content-divergent.
274 """
274 """
275 instabilities = []
275 instabilities = []
276 if self.orphan():
276 if self.orphan():
277 instabilities.append('orphan')
277 instabilities.append('orphan')
278 if self.phasedivergent():
278 if self.phasedivergent():
279 instabilities.append('phase-divergent')
279 instabilities.append('phase-divergent')
280 if self.contentdivergent():
280 if self.contentdivergent():
281 instabilities.append('content-divergent')
281 instabilities.append('content-divergent')
282 return instabilities
282 return instabilities
283
283
284 def parents(self):
284 def parents(self):
285 """return contexts for each parent changeset"""
285 """return contexts for each parent changeset"""
286 return self._parents
286 return self._parents
287
287
288 def p1(self):
288 def p1(self):
289 return self._parents[0]
289 return self._parents[0]
290
290
291 def p2(self):
291 def p2(self):
292 parents = self._parents
292 parents = self._parents
293 if len(parents) == 2:
293 if len(parents) == 2:
294 return parents[1]
294 return parents[1]
295 return changectx(self._repo, nullrev)
295 return changectx(self._repo, nullrev)
296
296
297 def _fileinfo(self, path):
297 def _fileinfo(self, path):
298 if r'_manifest' in self.__dict__:
298 if r'_manifest' in self.__dict__:
299 try:
299 try:
300 return self._manifest[path], self._manifest.flags(path)
300 return self._manifest[path], self._manifest.flags(path)
301 except KeyError:
301 except KeyError:
302 raise error.ManifestLookupError(self._node, path,
302 raise error.ManifestLookupError(self._node, path,
303 _('not found in manifest'))
303 _('not found in manifest'))
304 if r'_manifestdelta' in self.__dict__ or path in self.files():
304 if r'_manifestdelta' in self.__dict__ or path in self.files():
305 if path in self._manifestdelta:
305 if path in self._manifestdelta:
306 return (self._manifestdelta[path],
306 return (self._manifestdelta[path],
307 self._manifestdelta.flags(path))
307 self._manifestdelta.flags(path))
308 mfl = self._repo.manifestlog
308 mfl = self._repo.manifestlog
309 try:
309 try:
310 node, flag = mfl[self._changeset.manifest].find(path)
310 node, flag = mfl[self._changeset.manifest].find(path)
311 except KeyError:
311 except KeyError:
312 raise error.ManifestLookupError(self._node, path,
312 raise error.ManifestLookupError(self._node, path,
313 _('not found in manifest'))
313 _('not found in manifest'))
314
314
315 return node, flag
315 return node, flag
316
316
317 def filenode(self, path):
317 def filenode(self, path):
318 return self._fileinfo(path)[0]
318 return self._fileinfo(path)[0]
319
319
320 def flags(self, path):
320 def flags(self, path):
321 try:
321 try:
322 return self._fileinfo(path)[1]
322 return self._fileinfo(path)[1]
323 except error.LookupError:
323 except error.LookupError:
324 return ''
324 return ''
325
325
326 def sub(self, path, allowcreate=True):
326 def sub(self, path, allowcreate=True):
327 '''return a subrepo for the stored revision of path, never wdir()'''
327 '''return a subrepo for the stored revision of path, never wdir()'''
328 return subrepo.subrepo(self, path, allowcreate=allowcreate)
328 return subrepo.subrepo(self, path, allowcreate=allowcreate)
329
329
330 def nullsub(self, path, pctx):
330 def nullsub(self, path, pctx):
331 return subrepo.nullsubrepo(self, path, pctx)
331 return subrepo.nullsubrepo(self, path, pctx)
332
332
333 def workingsub(self, path):
333 def workingsub(self, path):
334 '''return a subrepo for the stored revision, or wdir if this is a wdir
334 '''return a subrepo for the stored revision, or wdir if this is a wdir
335 context.
335 context.
336 '''
336 '''
337 return subrepo.subrepo(self, path, allowwdir=True)
337 return subrepo.subrepo(self, path, allowwdir=True)
338
338
339 def match(self, pats=None, include=None, exclude=None, default='glob',
339 def match(self, pats=None, include=None, exclude=None, default='glob',
340 listsubrepos=False, badfn=None):
340 listsubrepos=False, badfn=None):
341 r = self._repo
341 r = self._repo
342 return matchmod.match(r.root, r.getcwd(), pats,
342 return matchmod.match(r.root, r.getcwd(), pats,
343 include, exclude, default,
343 include, exclude, default,
344 auditor=r.nofsauditor, ctx=self,
344 auditor=r.nofsauditor, ctx=self,
345 listsubrepos=listsubrepos, badfn=badfn)
345 listsubrepos=listsubrepos, badfn=badfn)
346
346
347 def diff(self, ctx2=None, match=None, **opts):
347 def diff(self, ctx2=None, match=None, **opts):
348 """Returns a diff generator for the given contexts and matcher"""
348 """Returns a diff generator for the given contexts and matcher"""
349 if ctx2 is None:
349 if ctx2 is None:
350 ctx2 = self.p1()
350 ctx2 = self.p1()
351 if ctx2 is not None:
351 if ctx2 is not None:
352 ctx2 = self._repo[ctx2]
352 ctx2 = self._repo[ctx2]
353 diffopts = patch.diffopts(self._repo.ui, opts)
353 diffopts = patch.diffopts(self._repo.ui, opts)
354 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
354 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
355
355
356 def dirs(self):
356 def dirs(self):
357 return self._manifest.dirs()
357 return self._manifest.dirs()
358
358
359 def hasdir(self, dir):
359 def hasdir(self, dir):
360 return self._manifest.hasdir(dir)
360 return self._manifest.hasdir(dir)
361
361
362 def status(self, other=None, match=None, listignored=False,
362 def status(self, other=None, match=None, listignored=False,
363 listclean=False, listunknown=False, listsubrepos=False):
363 listclean=False, listunknown=False, listsubrepos=False):
364 """return status of files between two nodes or node and working
364 """return status of files between two nodes or node and working
365 directory.
365 directory.
366
366
367 If other is None, compare this node with working directory.
367 If other is None, compare this node with working directory.
368
368
369 returns (modified, added, removed, deleted, unknown, ignored, clean)
369 returns (modified, added, removed, deleted, unknown, ignored, clean)
370 """
370 """
371
371
372 ctx1 = self
372 ctx1 = self
373 ctx2 = self._repo[other]
373 ctx2 = self._repo[other]
374
374
375 # This next code block is, admittedly, fragile logic that tests for
375 # This next code block is, admittedly, fragile logic that tests for
376 # reversing the contexts and wouldn't need to exist if it weren't for
376 # reversing the contexts and wouldn't need to exist if it weren't for
377 # the fast (and common) code path of comparing the working directory
377 # the fast (and common) code path of comparing the working directory
378 # with its first parent.
378 # with its first parent.
379 #
379 #
380 # What we're aiming for here is the ability to call:
380 # What we're aiming for here is the ability to call:
381 #
381 #
382 # workingctx.status(parentctx)
382 # workingctx.status(parentctx)
383 #
383 #
384 # If we always built the manifest for each context and compared those,
384 # If we always built the manifest for each context and compared those,
385 # then we'd be done. But the special case of the above call means we
385 # then we'd be done. But the special case of the above call means we
386 # just copy the manifest of the parent.
386 # just copy the manifest of the parent.
387 reversed = False
387 reversed = False
388 if (not isinstance(ctx1, changectx)
388 if (not isinstance(ctx1, changectx)
389 and isinstance(ctx2, changectx)):
389 and isinstance(ctx2, changectx)):
390 reversed = True
390 reversed = True
391 ctx1, ctx2 = ctx2, ctx1
391 ctx1, ctx2 = ctx2, ctx1
392
392
393 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
393 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
394 match = ctx2._matchstatus(ctx1, match)
394 match = ctx2._matchstatus(ctx1, match)
395 r = scmutil.status([], [], [], [], [], [], [])
395 r = scmutil.status([], [], [], [], [], [], [])
396 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
396 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
397 listunknown)
397 listunknown)
398
398
399 if reversed:
399 if reversed:
400 # Reverse added and removed. Clear deleted, unknown and ignored as
400 # Reverse added and removed. Clear deleted, unknown and ignored as
401 # these make no sense to reverse.
401 # these make no sense to reverse.
402 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
402 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
403 r.clean)
403 r.clean)
404
404
405 if listsubrepos:
405 if listsubrepos:
406 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
406 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
407 try:
407 try:
408 rev2 = ctx2.subrev(subpath)
408 rev2 = ctx2.subrev(subpath)
409 except KeyError:
409 except KeyError:
410 # A subrepo that existed in node1 was deleted between
410 # A subrepo that existed in node1 was deleted between
411 # node1 and node2 (inclusive). Thus, ctx2's substate
411 # node1 and node2 (inclusive). Thus, ctx2's substate
412 # won't contain that subpath. The best we can do ignore it.
412 # won't contain that subpath. The best we can do ignore it.
413 rev2 = None
413 rev2 = None
414 submatch = matchmod.subdirmatcher(subpath, match)
414 submatch = matchmod.subdirmatcher(subpath, match)
415 s = sub.status(rev2, match=submatch, ignored=listignored,
415 s = sub.status(rev2, match=submatch, ignored=listignored,
416 clean=listclean, unknown=listunknown,
416 clean=listclean, unknown=listunknown,
417 listsubrepos=True)
417 listsubrepos=True)
418 for rfiles, sfiles in zip(r, s):
418 for rfiles, sfiles in zip(r, s):
419 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
419 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
420
420
421 for l in r:
421 for l in r:
422 l.sort()
422 l.sort()
423
423
424 return r
424 return r
425
425
426 def _filterederror(repo, changeid):
426 def _filterederror(repo, changeid):
427 """build an exception to be raised about a filtered changeid
427 """build an exception to be raised about a filtered changeid
428
428
429 This is extracted in a function to help extensions (eg: evolve) to
429 This is extracted in a function to help extensions (eg: evolve) to
430 experiment with various message variants."""
430 experiment with various message variants."""
431 if repo.filtername.startswith('visible'):
431 if repo.filtername.startswith('visible'):
432 msg = _("hidden revision '%s'") % changeid
432 msg = _("hidden revision '%s'") % changeid
433 hint = _('use --hidden to access hidden revisions')
433 hint = _('use --hidden to access hidden revisions')
434 return error.FilteredRepoLookupError(msg, hint=hint)
434 return error.FilteredRepoLookupError(msg, hint=hint)
435 msg = _("filtered revision '%s' (not in '%s' subset)")
435 msg = _("filtered revision '%s' (not in '%s' subset)")
436 msg %= (changeid, repo.filtername)
436 msg %= (changeid, repo.filtername)
437 return error.FilteredRepoLookupError(msg)
437 return error.FilteredRepoLookupError(msg)
438
438
class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        # Resolution order below: int rev, 'null', 'tip', '.', 20-byte
        # binary node, decimal rev string, 40-char hex node, registered
        # names (bookmarks/tags/branches/...), then unique node-prefix
        # match.  Filtered-revision errors are re-raised eagerly at each
        # step so they reach the final handler with their identity intact.
        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if not pycompat.ispy3 and isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if changeid == '.' or changeid == repo.dirstate.p1():
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            if len(changeid) == 20:
                # looks like a raw binary node id
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            try:
                r = int(changeid)
                # reject strings such as '01' that int() accepts but that
                # do not round-trip back to the same text
                if '%d' % r != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l and r != wdirrev:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            if len(changeid) == 40:
                # looks like a full hex node id
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if changeid in repo.unfiltered().dirstate.parents():
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                # make the id legible in the final error message when it
                # is a raw binary node
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            # the changeset exists but is filtered out of this repo view;
            # raise a more helpful error than a plain lookup failure
            raise _filterederror(repo, changeid)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
550
550
    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            # _rev not resolved yet; fall back to identity so hashing
            # never raises
            return id(self)

    def __nonzero__(self):
        # false only for the null revision
        return self._rev != nullrev

    __bool__ = __nonzero__
561
561
    @propertycache
    def _changeset(self):
        # parsed changelog entry for this revision
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        # fully-read manifest for this changeset
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        # note: a plain property, not a propertycache — looked up from the
        # manifestlog on each access
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        # delta form of this changeset's manifest (readdelta)
        return self._manifestctx.readdelta()
577
577
    @propertycache
    def _parents(self):
        repo = self._repo
        p1, p2 = repo.changelog.parentrevs(self._rev)
        # omit the second parent when it is null so most callers see a
        # single-element list
        if p2 == nullrev:
            return [changectx(repo, p1)]
        return [changectx(repo, p1), changectx(repo, p2)]
585
585
    def changeset(self):
        # changelog entry fields as a plain tuple
        c = self._changeset
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra,
        )
    def manifestnode(self):
        return self._changeset.manifest

    def user(self):
        return self._changeset.user
    def date(self):
        return self._changeset.date
    def files(self):
        return self._changeset.files
    def description(self):
        return self._changeset.description
    def branch(self):
        # the branch name lives in extra; convert to local encoding
        return encoding.tolocal(self._changeset.extra.get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset.extra
    def extra(self):
        return self._changeset.extra
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        # True when this revision is filtered out of the 'visible' view
        return self._rev in repoview.filterrevs(self._repo, 'visible')
621
621
    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        # lazily yield a context for every ancestor revision
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        # lazily yield a context for every descendant revision
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)
634
634
    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            # resolve the file node through this changeset's manifest
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)
641
641
    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor.

        :warn: when True and several candidates exist, tell the user which
               one was picked and how to override the choice.
        """
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                # no configured preference matched a candidate; fall back
                # to the revlog's own choice
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)
677
677
    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        # walk() already yields only matching names
        return self.walk(match)
699
699
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory,
    overlayfilectx: duplicate another filecontext with some fields overridden.
    """
    @propertycache
    def _filelog(self):
        # filelog (revlog) holding the history of this file's path
        return self._repo.file(self._path)
712
712
    @propertycache
    def _changeid(self):
        # changelog revision this file context is associated with; the
        # branches below are ordered from most to least authoritative
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)
725
725
    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            # an explicit file id was given; resolve it in the filelog
            return self._filelog.lookup(self._fileid)
        else:
            # otherwise resolve through the associated changeset's manifest
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        # repo-relative path; defaults to the context's own path
        return self._path
740
740
    def __nonzero__(self):
        # a file context is truthy when the file exists in its changeset
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            # changeset association could not be resolved
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            # not fully resolvable; fall back to identity hashing (keeps
            # __hash__ consistent with the __eq__ fallback below)
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)
777
777
    # Thin accessors; most delegate to the associated changectx, so they
    # reflect the changeset this file revision is being viewed from.
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        # changelog revision (lazily linkrev-adjusted via _changeid)
        return self._changeid
    def linkrev(self):
        # raw linkrev from the filelog, without alias adjustment
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        # copy/rename source, or a false value when not copied (the
        # _copied attribute is set by subclasses/constructors)
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())

    def path(self):
        return self._path
826
826
    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            # unreadable data is treated as not binary
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts.
        This base implementation returns False; contexts that stand in for
        a missing/deleted file override it to return True.
        """
        return False
843
843
    # subclasses with a custom comparison set this so the other side of a
    # cmp() defers to them
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        # only read file data when a cheap size check cannot rule out
        # equality
        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True
862
862
    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result in a crash somewhere else at some point.
        return lkr
908
908
    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            # no associated changeset, or the linkrev already matches it:
            # the raw linkrev is trustworthy
            return self.linkrev()
        return self._adjustlinkrev(self.rev(), inclusive=True)
924
924
    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx
940
940
    def parents(self):
        # parent file revisions as (path, filenode, filelog) triples,
        # with rename information substituted for the first null parent
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        # synthesize a null filectx when there is no second parent
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
969
969
970 def annotate(self, follow=False, linenumber=False, skiprevs=None,
970 def annotate(self, follow=False, linenumber=False, skiprevs=None,
971 diffopts=None):
971 diffopts=None):
972 '''returns a list of tuples of ((ctx, number), line) for each line
972 '''returns a list of tuples of ((ctx, number), line) for each line
973 in the file, where ctx is the filectx of the node where
973 in the file, where ctx is the filectx of the node where
974 that line was last changed; if linenumber parameter is true, number is
974 that line was last changed; if linenumber parameter is true, number is
975 the line number at the first appearance in the managed file, otherwise,
975 the line number at the first appearance in the managed file, otherwise,
976 number has a fixed value of False.
976 number has a fixed value of False.
977 '''
977 '''
978
978
979 def lines(text):
979 def lines(text):
980 if text.endswith("\n"):
980 if text.endswith("\n"):
981 return text.count("\n")
981 return text.count("\n")
982 return text.count("\n") + int(bool(text))
982 return text.count("\n") + int(bool(text))
983
983
984 if linenumber:
984 if linenumber:
985 def decorate(text, rev):
985 def decorate(text, rev):
986 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
986 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
987 else:
987 else:
988 def decorate(text, rev):
988 def decorate(text, rev):
989 return ([(rev, False)] * lines(text), text)
989 return ([(rev, False)] * lines(text), text)
990
990
991 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
991 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
992
992
993 def parents(f):
993 def parents(f):
994 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
994 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
995 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
995 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
996 # from the topmost introrev (= srcrev) down to p.linkrev() if it
996 # from the topmost introrev (= srcrev) down to p.linkrev() if it
997 # isn't an ancestor of the srcrev.
997 # isn't an ancestor of the srcrev.
998 f._changeid
998 f._changeid
999 pl = f.parents()
999 pl = f.parents()
1000
1000
1001 # Don't return renamed parents if we aren't following.
1001 # Don't return renamed parents if we aren't following.
1002 if not follow:
1002 if not follow:
1003 pl = [p for p in pl if p.path() == f.path()]
1003 pl = [p for p in pl if p.path() == f.path()]
1004
1004
1005 # renamed filectx won't have a filelog yet, so set it
1005 # renamed filectx won't have a filelog yet, so set it
1006 # from the cache to save time
1006 # from the cache to save time
1007 for p in pl:
1007 for p in pl:
1008 if not '_filelog' in p.__dict__:
1008 if not '_filelog' in p.__dict__:
1009 p._filelog = getlog(p.path())
1009 p._filelog = getlog(p.path())
1010
1010
1011 return pl
1011 return pl
1012
1012
1013 # use linkrev to find the first changeset where self appeared
1013 # use linkrev to find the first changeset where self appeared
1014 base = self
1014 base = self
1015 introrev = self.introrev()
1015 introrev = self.introrev()
1016 if self.rev() != introrev:
1016 if self.rev() != introrev:
1017 base = self.filectx(self.filenode(), changeid=introrev)
1017 base = self.filectx(self.filenode(), changeid=introrev)
1018 if getattr(base, '_ancestrycontext', None) is None:
1018 if getattr(base, '_ancestrycontext', None) is None:
1019 cl = self._repo.changelog
1019 cl = self._repo.changelog
1020 if introrev is None:
1020 if introrev is None:
1021 # wctx is not inclusive, but works because _ancestrycontext
1021 # wctx is not inclusive, but works because _ancestrycontext
1022 # is used to test filelog revisions
1022 # is used to test filelog revisions
1023 ac = cl.ancestors([p.rev() for p in base.parents()],
1023 ac = cl.ancestors([p.rev() for p in base.parents()],
1024 inclusive=True)
1024 inclusive=True)
1025 else:
1025 else:
1026 ac = cl.ancestors([introrev], inclusive=True)
1026 ac = cl.ancestors([introrev], inclusive=True)
1027 base._ancestrycontext = ac
1027 base._ancestrycontext = ac
1028
1028
1029 # This algorithm would prefer to be recursive, but Python is a
1029 # This algorithm would prefer to be recursive, but Python is a
1030 # bit recursion-hostile. Instead we do an iterative
1030 # bit recursion-hostile. Instead we do an iterative
1031 # depth-first search.
1031 # depth-first search.
1032
1032
1033 # 1st DFS pre-calculates pcache and needed
1033 # 1st DFS pre-calculates pcache and needed
1034 visit = [base]
1034 visit = [base]
1035 pcache = {}
1035 pcache = {}
1036 needed = {base: 1}
1036 needed = {base: 1}
1037 while visit:
1037 while visit:
1038 f = visit.pop()
1038 f = visit.pop()
1039 if f in pcache:
1039 if f in pcache:
1040 continue
1040 continue
1041 pl = parents(f)
1041 pl = parents(f)
1042 pcache[f] = pl
1042 pcache[f] = pl
1043 for p in pl:
1043 for p in pl:
1044 needed[p] = needed.get(p, 0) + 1
1044 needed[p] = needed.get(p, 0) + 1
1045 if p not in pcache:
1045 if p not in pcache:
1046 visit.append(p)
1046 visit.append(p)
1047
1047
1048 # 2nd DFS does the actual annotate
1048 # 2nd DFS does the actual annotate
1049 visit[:] = [base]
1049 visit[:] = [base]
1050 hist = {}
1050 hist = {}
1051 while visit:
1051 while visit:
1052 f = visit[-1]
1052 f = visit[-1]
1053 if f in hist:
1053 if f in hist:
1054 visit.pop()
1054 visit.pop()
1055 continue
1055 continue
1056
1056
1057 ready = True
1057 ready = True
1058 pl = pcache[f]
1058 pl = pcache[f]
1059 for p in pl:
1059 for p in pl:
1060 if p not in hist:
1060 if p not in hist:
1061 ready = False
1061 ready = False
1062 visit.append(p)
1062 visit.append(p)
1063 if ready:
1063 if ready:
1064 visit.pop()
1064 visit.pop()
1065 curr = decorate(f.data(), f)
1065 curr = decorate(f.data(), f)
1066 skipchild = False
1066 skipchild = False
1067 if skiprevs is not None:
1067 if skiprevs is not None:
1068 skipchild = f._changeid in skiprevs
1068 skipchild = f._changeid in skiprevs
1069 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1069 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1070 diffopts)
1070 diffopts)
1071 for p in pl:
1071 for p in pl:
1072 if needed[p] == 1:
1072 if needed[p] == 1:
1073 del hist[p]
1073 del hist[p]
1074 del needed[p]
1074 del needed[p]
1075 else:
1075 else:
1076 needed[p] -= 1
1076 needed[p] -= 1
1077
1077
1078 hist[f] = curr
1078 hist[f] = curr
1079 del pcache[f]
1079 del pcache[f]
1080
1080
1081 return zip(hist[base][0], hist[base][1].splitlines(True))
1081 return zip(hist[base][0], hist[base][1].splitlines(True))
1082
1082
    def ancestors(self, followfirst=False):
        """Yield the ancestor filectxs of this file revision.

        Candidates are kept in ``visit`` keyed by (linkrev, filenode) and
        the maximum key is popped each time, so ancestors are produced in
        descending linkrev order.  If ``followfirst`` is true, only first
        parents are followed.
        """
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            # pop the candidate with the highest (linkrev, filenode) key
            c = visit.pop(max(visit))
            yield c
1098
1098
1099 def decodeddata(self):
1099 def decodeddata(self):
1100 """Returns `data()` after running repository decoding filters.
1100 """Returns `data()` after running repository decoding filters.
1101
1101
1102 This is often equivalent to how the data would be expressed on disk.
1102 This is often equivalent to how the data would be expressed on disk.
1103 """
1103 """
1104 return self._repo.wwritedata(self.path(), self.data())
1104 return self._repo.wwritedata(self.path(), self.data())
1105
1105
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    >>> oldfctx = 'old'
    >>> p1fctx, p2fctx, childfctx = 'p1', 'p2', 'c'
    >>> olddata = 'a\nb\n'
    >>> p1data = 'a\nb\nc\n'
    >>> p2data = 'a\nc\nd\n'
    >>> childdata = 'a\nb2\nc\nc2\nd\n'
    >>> diffopts = mdiff.diffopts()

    >>> def decorate(text, rev):
    ...     return ([(rev, i) for i in xrange(1, text.count('\n') + 1)], text)

    Basic usage:

    >>> oldann = decorate(olddata, oldfctx)
    >>> p1ann = decorate(p1data, p1fctx)
    >>> p1ann = _annotatepair([oldann], p1fctx, p1ann, False, diffopts)
    >>> p1ann[0]
    [('old', 1), ('old', 2), ('p1', 3)]
    >>> p2ann = decorate(p2data, p2fctx)
    >>> p2ann = _annotatepair([oldann], p2fctx, p2ann, False, diffopts)
    >>> p2ann[0]
    [('old', 1), ('p2', 2), ('p2', 3)]

    Test with multiple parents (note the difference caused by ordering):

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, False,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('c', 2), ('p2', 2), ('c', 4), ('p2', 3)]

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, False,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('c', 2), ('p1', 3), ('c', 4), ('p2', 3)]

    Test with skipchild (note the difference caused by ordering):

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p1ann, p2ann], childfctx, childann, True,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('old', 2), ('p2', 2), ('p2', 2), ('p2', 3)]

    >>> childann = decorate(childdata, childfctx)
    >>> childann = _annotatepair([p2ann, p1ann], childfctx, childann, True,
    ...                          diffopts)
    >>> childann[0]
    [('old', 1), ('old', 2), ('p1', 3), ('p1', 3), ('p2', 3)]
    '''
    # diff each parent against the child once
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                # equal block: copy the parent's annotations verbatim
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        # This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        #   diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    # parent hunk is at least as long as the child hunk:
                    # map child lines 1:1 onto parent lines
                    for bk in xrange(b1, b2):
                        if child[0][bk][0] == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = parent[0][ak]
                else:
                    # parent hunk is shorter; defer to the second pass
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk][0] == childfctx:
                        # min() clamps to a2 - 1, repeating the parent's
                        # last hunk line when the child hunk is longer
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = parent[0][ak]
    return child
1215
1215
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # at least one way of identifying the revision must be supplied
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        # only pre-set the attributes we were given; anything missing is
        # computed lazily by the propertycaches on the base class
        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        # raw revision data, bypassing flag processing
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            # honor censor.policy: either pretend the file is empty or abort
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                # same file revision already present in a parent: this is
                # not the changeset that introduced the rename
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1321
1321
1322 class committablectx(basectx):
1322 class committablectx(basectx):
1323 """A committablectx object provides common functionality for a context that
1323 """A committablectx object provides common functionality for a context that
1324 wants the ability to commit, e.g. workingctx or memctx."""
1324 wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        """Initialize an uncommitted context.

        text/user/date/extra/changes pre-seed the commit metadata; anything
        left unset falls back to the propertycaches on this class.
        """
        self._repo = repo
        # not committed yet: no revision number or node
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            # pre-seed the status; otherwise the _status propertycache
            # computes it from the repository on demand
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
            # an empty branch name means the default branch
            if self._extra['branch'] == '':
                self._extra['branch'] = 'default'
1349
1349
    def __bytes__(self):
        # render as the first parent followed by '+' (working-dir marker)
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)
1354
1354
    def __nonzero__(self):
        # an uncommitted context is always truthy
        return True

    __bool__ = __nonzero__
1359
1359
    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                # resolve copies so a copied file inherits its source's flags
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                # keep the side that changed relative to the ancestor
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func
1391
1391
    @propertycache
    def _flagfunc(self):
        # dirstate flag lookup, with _buildflagfunc as the fallback when
        # the filesystem can't report flags
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # default status when none was passed to __init__
        return self._repo.status()

    @propertycache
    def _user(self):
        # default committer when none was passed to __init__
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        # default date: devel.default-date (for reproducible runs) or "now"
        ui = self._repo.ui
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date
1411
1411
    def subrev(self, subpath):
        # uncommitted contexts have no recorded subrepo revision
        return None

    def manifestnode(self):
        # no manifest has been written for an uncommitted context
        return None
    def user(self):
        # prefer the explicit user, fall back to the configured username
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # every file touched: modified + added + removed, sorted
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)
1426
1426
    # thin accessors over the cached status and extra data
    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        # branch name is stored in extra as UTF-8; convert for local use
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra
1441
1441
    def tags(self):
        # an uncommitted context carries no tags
        return []

    def bookmarks(self):
        # union of the parents' bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b
1450
1450
    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            # never use a lower phase than any parent's
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        # an uncommitted context can never be hidden
        return False
1459
1459
    def children(self):
        # nothing can descend from an uncommitted context
        return []
1462
1462
    def flags(self, path):
        """Return the flags ('l', 'x' or '') for path, or '' if unknown."""
        if r'_manifest' in self.__dict__:
            # a manifest was already built -- consult it directly
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            # e.g. the file is gone from the working directory
            return ''
1474
1474
    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now
1478
1478
1479 def walk(self, match):
1479 def walk(self, match):
1480 '''Generates matching file names.'''
1480 '''Generates matching file names.'''
1481 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1481 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1482 True, False))
1482 True, False))
1483
1483
1484 def matches(self, match):
1484 def matches(self, match):
1485 return sorted(self._repo.dirstate.matches(match))
1485 return sorted(self._repo.dirstate.matches(match))
1486
1486
1487 def ancestors(self):
1487 def ancestors(self):
1488 for p in self._parents:
1488 for p in self._parents:
1489 yield p
1489 yield p
1490 for a in self._repo.changelog.ancestors(
1490 for a in self._repo.changelog.ancestors(
1491 [p.rev() for p in self._parents]):
1491 [p.rev() for p in self._parents]):
1492 yield changectx(self._repo, a)
1492 yield changectx(self._repo, a)
1493
1493
    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        ``node`` is the binary node id of the newly created changeset.
        """

        # move the dirstate to the new parent inside a parentchange scope
        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())
1515
1515
1516 def dirty(self, missing=False, merge=True, branch=True):
1516 def dirty(self, missing=False, merge=True, branch=True):
1517 return False
1517 return False
1518
1518
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
              or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)

    def __iter__(self):
        # iterate tracked files, skipping those marked removed ('r')
        d = self._repo.dirstate
        for f in d:
            if d[f] != 'r':
                yield f

    def __contains__(self, key):
        # a file is "in" the working context unless untracked ('?')
        # or removed ('r')
        return self._repo.dirstate[key] not in "?r"

    def hex(self):
        # the working directory has a well-known fake node id
        return hex(wdirid)

    @propertycache
    def _parents(self):
        """Parent changectxs of the working directory (one or two)."""
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            # not a merge: drop the null second parent
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        """Schedule the files in ``list`` for addition; return rejects.

        ``prefix`` is prepended for display and portability checks (used
        when called on behalf of a subrepo).
        """
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension.  That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # warn only: huge files are allowed but expensive
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    # re-adding a removed file: restore tracking
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

    def forget(self, files, prefix=""):
        """Stop tracking ``files`` without deleting them; return rejects."""
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    # committed file: mark removed
                    self._repo.dirstate.remove(f)
                else:
                    # file was only scheduled for add: drop it entirely
                    self._repo.dirstate.drop(f)
            return rejected

    def undelete(self, list):
        """Restore files marked removed from their parent revision."""
        pctxs = self.parents()
        with self._repo.wlock():
            ds = self._repo.dirstate
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
                else:
                    # take content from whichever parent has the file
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)

    def copy(self, source, dest):
        """Record that ``dest`` was copied from ``source`` in the dirstate."""
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n")
                               % self._repo.dirstate.pathto(dest))
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n")
                               % self._repo.dirstate.pathto(dest))
        else:
            with self._repo.wlock():
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        """Build a matcher rooted at this repository's working directory."""
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
                              default, auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn,
                              icasefs=icasefs)

    def _filtersuspectsymlink(self, files):
        """Drop "modified" symlinks whose content is clearly not a link."""
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        """Compare possibly-clean ``files`` against the first parent.

        Returns (modified, deleted, fixup) where ``fixup`` are files that
        turned out to be clean and whose dirstate entry should be refreshed.
        """
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (f not in pctx or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # A file become inaccessible in between? Mark it as deleted,
                # matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup

    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug('skip updating dirstate: '
                                            'identity mismatch\n')
            except error.LockError:
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        listignored, listclean, listunknown = ignored, clean, unknown
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
                                            listclean, listunknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

            if fixup and listclean:
                s.clean.extend(fixup)

        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuse the file nodeid from parent, but we use special node
        identifiers for added and modified files. This is used by manifests
        merge to see that files are different and by update logic to avoid
        deleting newly added files.
        """
        return self._buildstatusmanifest(self._status)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results."""
        parents = self.parents()

        man = parents[0].manifest().copy()

        ff = self._flagfunc
        for i, l in ((addednodeid, status.added),
                     (modifiednodeid, status.modified)):
            for f in l:
                man[f] = i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in status.deleted + status.removed:
            if f in man:
                del man[f]

        return man

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo['.']:
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        if other != self._repo['.']:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn('%s: %s\n' %
                                       (self._repo.dirstate.pathto(f), msg))
            match.bad = bad
        return match

    def markcommitted(self, node):
        # in addition to the base cleanup, let sparse update its state
        super(workingctx, self).markcommitted(node)

        sparse.aftercommit(self._repo, node)
1862
1862
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        # filelog and ctx are optional and lazily computed by the
        # basefilectx propertycaches when not supplied here
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context always "exists"
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            # node of path in ctx's manifest, or nullid if absent
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # copy source replaces the first parent; its filelog is unknown
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        # drop parents where the file does not exist (nullid)
        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        # an uncommitted file has no children
        return []
1909
1909
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # default to a fresh workingctx when none was supplied
        return workingctx(self._repo)

    def data(self):
        """Return the file's current content from the working directory."""
        return self._repo.wread(self._path)

    def renamed(self):
        """Return (source path, source filenode) if this file was copied,
        else None."""
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        """Return the on-disk size of the file (lstat, so links count too)."""
        return self._repo.wvfs.lstat(self._path).st_size

    def date(self):
        """Return (mtime, tzoffset); falls back to the changectx date when
        the file is missing."""
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def exists(self):
        # follows symlinks
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        # does not follow symlinks
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        """Check the path for traversal/portability problems via wvfs."""
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)

    def write(self, data, flags, backgroundclose=False):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        if wvfs.isdir(self._path) and not wvfs.islink(self._path):
            # a real directory occupies this path: clear it out
            wvfs.removedirs(self._path)

    def setflags(self, l, x):
        """Set the symlink (l) and executable (x) flags on the file."""
        self._repo.wvfs.setflags(self._path, l, x)
1976
1976
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # NOTE(review): ``super(workingctx, self)`` deliberately(?) skips
        # workingctx.__init__ in the MRO and hands the precomputed
        # ``changes`` status straight to the next base -- TODO confirm.
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        status = self._status
        return scmutil.status([f for f in status.modified if match(f)],
                              [f for f in status.added if match(f)],
                              [f for f in status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """The set of paths modified, added, or removed in this context."""
        status = self._status
        return set(status.modified) | set(status.added) | set(status.removed)
2012
2012
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        # EAFP: hit the cache first; on a miss, compute and remember.
        try:
            return cache[path]
        except KeyError:
            fctx = cache[path] = func(repo, memctx, path)
            return fctx

    return getfilectx
2028
2028
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        renamed = fctx.renamed()
        # Only the copy source half of the renamed() tuple is kept; a falsy
        # result is passed through unchanged (memfilectx treats it as "not
        # copied").
        copysource = renamed[0] if renamed else renamed
        return memfilectx(repo, path, fctx.data(),
                          islink=fctx.islink(),
                          isexec=fctx.isexec(),
                          copied=copysource,
                          memctx=memctx)

    return getfilectx
2047
2047
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            # the patch removed this file
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data,
                          islink=islink,
                          isexec=isexec,
                          copied=copied,
                          memctx=memctx)

    return getfilectx
2063
2063
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism.  'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        # not yet committed: no revision number / node assigned
        self._rev = None
        self._node = None
        # missing parents are normalized to nullid
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        # deduplicate and order the touched-file list
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # added files have no filelog parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                # NOTE(review): truth-tests the filectx here, while
                # metadataonlyctx._status uses ``f in self`` -- confirm both
                # are intended.
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2186
2186
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        # encode the flags string: 'l' for symlink, 'x' for executable
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            # filelog copy metadata; revision is recomputed at commit time
            self._copied = (copied, nullid)

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        # in-memory file: just replace the buffered content
        self._data = data
2219
2219
class overlayfilectx(committablefilectx):
    """Like memfilectx but take an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            # keep data lazy: store the bound method, call only on demand
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr in attrs:
                if util.safehasattr(originalfctx, attr):
                    setattr(self, attr, getattr(originalfctx, attr))

    def data(self):
        # lazily resolved; may be the original fctx's data() or an override
        return self._datafunc()
2290
2290
class metadataonlyctx(committablectx):
    """Like memctx but it's reusing the manifest of different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is original revision which manifest we're reuisng
    'parents' is a sequence of two parent revisions identifiers (pass None for
    every missing parent), 'text' is the commit.

    user receives the committer name and defaults to current repository
    username, date is the commit date in any format supported by
    util.parsedate() and defaults to current date, extra is a dictionary of
    metadata or is left empty.
    """
    def __new__(cls, repo, originalctx, *args, **kwargs):
        return super(metadataonlyctx, cls).__new__(cls, repo)

    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        # not yet committed: no revision number / node assigned
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        # the manifest is reused verbatim from the original commit
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        # pad with null parents so _parents always has length 2
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        # file data is served from the original revision
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2388
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path):
        # absolute or cwd-relative filesystem path of the wrapped file
        self._path = path

    def cmp(self, otherfilectx):
        """Return True if this file's content differs from
        ``otherfilectx``'s."""
        return self.data() != otherfilectx.data()

    def path(self):
        return self._path

    def flags(self):
        # arbitrary on-disk files carry no symlink/exec flag information
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        # raw bytes; no repo filters apply to files outside a repository
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags):
        """Replace the file's content with ``data``; flags are unsupported."""
        assert not flags
        # Write in binary mode to mirror the binary reads in data() /
        # decodeddata(): text mode would reject bytes on Python 3 and
        # translate newlines on Windows.
        with open(self._path, "wb") as f:
            f.write(data)
General Comments 0
You need to be logged in to leave comments. Login now