context: drop support for looking up context by ambiguous changeid (API)...
Martin von Zweigbergk
r37871:8b86acc7 default
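The comment block deleted from changectxdeprecwarn() further down in this diff spells out how callers should replace ambiguous repo[changeid] lookups once this API support is dropped. The following is a minimal sketch that restates that guidance; it assumes the mercurial package is importable, and the lookup() helper name and the choice of default are illustrative, not part of the commit.

# Sketch only: restates the migration guidance from the
# changectxdeprecwarn() comment block that this commit deletes.
from mercurial import scmutil

def lookup(repo, x):
    # general revsets (e.g. user-facing CLI arguments):
    #     scmutil.revsingle(repo, x)
    # a string known to be a stringified int:
    #     repo[int(x)]
    # a bookmark/tag/branch or other namespace symbol, when arbitrary
    # (possibly expensive) revsets should not be allowed:
    #     scmutil.revsymbol(repo, x)
    # a hex nodeid prefix:
    #     repo[scmutil.resolvehexnodeidprefix(repo, x)]
    return scmutil.revsingle(repo, x)

Which form applies depends on what the caller already knows about the string; per the removed comments, scmutil.revsymbol() is the safer choice when the value may come from a remote user and a full revset would be too costly.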
@@ -1,2600 +1,2541 @@
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirfilenodeids,
25 wdirfilenodeids,
26 wdirid,
26 wdirid,
27 wdirrev,
28 )
27 )
29 from . import (
28 from . import (
30 dagop,
29 dagop,
31 encoding,
30 encoding,
32 error,
31 error,
33 fileset,
32 fileset,
34 match as matchmod,
33 match as matchmod,
35 obsolete as obsmod,
34 obsolete as obsmod,
36 patch,
35 patch,
37 pathutil,
36 pathutil,
38 phases,
37 phases,
39 pycompat,
38 pycompat,
40 repoview,
39 repoview,
41 revlog,
40 revlog,
42 scmutil,
41 scmutil,
43 sparse,
42 sparse,
44 subrepo,
43 subrepo,
45 subrepoutil,
44 subrepoutil,
46 util,
45 util,
47 )
46 )
48 from .utils import (
47 from .utils import (
49 dateutil,
48 dateutil,
50 stringutil,
49 stringutil,
51 )
50 )
52
51
53 propertycache = util.propertycache
52 propertycache = util.propertycache
54
53
55 nonascii = re.compile(br'[^\x21-\x7f]').search
54 nonascii = re.compile(br'[^\x21-\x7f]').search
56
55
57 class basectx(object):
56 class basectx(object):
58 """A basectx object represents the common logic for its children:
57 """A basectx object represents the common logic for its children:
59 changectx: read-only context that is already present in the repo,
58 changectx: read-only context that is already present in the repo,
60 workingctx: a context that represents the working directory and can
59 workingctx: a context that represents the working directory and can
61 be committed,
60 be committed,
62 memctx: a context that represents changes in-memory and can also
61 memctx: a context that represents changes in-memory and can also
63 be committed."""
62 be committed."""
64
63
65 def __init__(self, repo):
64 def __init__(self, repo):
66 self._repo = repo
65 self._repo = repo
67
66
68 def __bytes__(self):
67 def __bytes__(self):
69 return short(self.node())
68 return short(self.node())
70
69
71 __str__ = encoding.strmethod(__bytes__)
70 __str__ = encoding.strmethod(__bytes__)
72
71
73 def __repr__(self):
72 def __repr__(self):
74 return r"<%s %s>" % (type(self).__name__, str(self))
73 return r"<%s %s>" % (type(self).__name__, str(self))
75
74
76 def __eq__(self, other):
75 def __eq__(self, other):
77 try:
76 try:
78 return type(self) == type(other) and self._rev == other._rev
77 return type(self) == type(other) and self._rev == other._rev
79 except AttributeError:
78 except AttributeError:
80 return False
79 return False
81
80
82 def __ne__(self, other):
81 def __ne__(self, other):
83 return not (self == other)
82 return not (self == other)
84
83
85 def __contains__(self, key):
84 def __contains__(self, key):
86 return key in self._manifest
85 return key in self._manifest
87
86
88 def __getitem__(self, key):
87 def __getitem__(self, key):
89 return self.filectx(key)
88 return self.filectx(key)
90
89
91 def __iter__(self):
90 def __iter__(self):
92 return iter(self._manifest)
91 return iter(self._manifest)
93
92
94 def _buildstatusmanifest(self, status):
93 def _buildstatusmanifest(self, status):
95 """Builds a manifest that includes the given status results, if this is
94 """Builds a manifest that includes the given status results, if this is
96 a working copy context. For non-working copy contexts, it just returns
95 a working copy context. For non-working copy contexts, it just returns
97 the normal manifest."""
96 the normal manifest."""
98 return self.manifest()
97 return self.manifest()
99
98
100 def _matchstatus(self, other, match):
99 def _matchstatus(self, other, match):
101 """This internal method provides a way for child objects to override the
100 """This internal method provides a way for child objects to override the
102 match operator.
101 match operator.
103 """
102 """
104 return match
103 return match
105
104
106 def _buildstatus(self, other, s, match, listignored, listclean,
105 def _buildstatus(self, other, s, match, listignored, listclean,
107 listunknown):
106 listunknown):
108 """build a status with respect to another context"""
107 """build a status with respect to another context"""
109 # Load earliest manifest first for caching reasons. More specifically,
108 # Load earliest manifest first for caching reasons. More specifically,
110 # if you have revisions 1000 and 1001, 1001 is probably stored as a
109 # if you have revisions 1000 and 1001, 1001 is probably stored as a
111 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
110 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
112 # 1000 and cache it so that when you read 1001, we just need to apply a
111 # 1000 and cache it so that when you read 1001, we just need to apply a
113 # delta to what's in the cache. So that's one full reconstruction + one
112 # delta to what's in the cache. So that's one full reconstruction + one
114 # delta application.
113 # delta application.
115 mf2 = None
114 mf2 = None
116 if self.rev() is not None and self.rev() < other.rev():
115 if self.rev() is not None and self.rev() < other.rev():
117 mf2 = self._buildstatusmanifest(s)
116 mf2 = self._buildstatusmanifest(s)
118 mf1 = other._buildstatusmanifest(s)
117 mf1 = other._buildstatusmanifest(s)
119 if mf2 is None:
118 if mf2 is None:
120 mf2 = self._buildstatusmanifest(s)
119 mf2 = self._buildstatusmanifest(s)
121
120
122 modified, added = [], []
121 modified, added = [], []
123 removed = []
122 removed = []
124 clean = []
123 clean = []
125 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
124 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
126 deletedset = set(deleted)
125 deletedset = set(deleted)
127 d = mf1.diff(mf2, match=match, clean=listclean)
126 d = mf1.diff(mf2, match=match, clean=listclean)
128 for fn, value in d.iteritems():
127 for fn, value in d.iteritems():
129 if fn in deletedset:
128 if fn in deletedset:
130 continue
129 continue
131 if value is None:
130 if value is None:
132 clean.append(fn)
131 clean.append(fn)
133 continue
132 continue
134 (node1, flag1), (node2, flag2) = value
133 (node1, flag1), (node2, flag2) = value
135 if node1 is None:
134 if node1 is None:
136 added.append(fn)
135 added.append(fn)
137 elif node2 is None:
136 elif node2 is None:
138 removed.append(fn)
137 removed.append(fn)
139 elif flag1 != flag2:
138 elif flag1 != flag2:
140 modified.append(fn)
139 modified.append(fn)
141 elif node2 not in wdirfilenodeids:
140 elif node2 not in wdirfilenodeids:
142 # When comparing files between two commits, we save time by
141 # When comparing files between two commits, we save time by
143 # not comparing the file contents when the nodeids differ.
142 # not comparing the file contents when the nodeids differ.
144 # Note that this means we incorrectly report a reverted change
143 # Note that this means we incorrectly report a reverted change
145 # to a file as a modification.
144 # to a file as a modification.
146 modified.append(fn)
145 modified.append(fn)
147 elif self[fn].cmp(other[fn]):
146 elif self[fn].cmp(other[fn]):
148 modified.append(fn)
147 modified.append(fn)
149 else:
148 else:
150 clean.append(fn)
149 clean.append(fn)
151
150
152 if removed:
151 if removed:
153 # need to filter files if they are already reported as removed
152 # need to filter files if they are already reported as removed
154 unknown = [fn for fn in unknown if fn not in mf1 and
153 unknown = [fn for fn in unknown if fn not in mf1 and
155 (not match or match(fn))]
154 (not match or match(fn))]
156 ignored = [fn for fn in ignored if fn not in mf1 and
155 ignored = [fn for fn in ignored if fn not in mf1 and
157 (not match or match(fn))]
156 (not match or match(fn))]
158 # if they're deleted, don't report them as removed
157 # if they're deleted, don't report them as removed
159 removed = [fn for fn in removed if fn not in deletedset]
158 removed = [fn for fn in removed if fn not in deletedset]
160
159
161 return scmutil.status(modified, added, removed, deleted, unknown,
160 return scmutil.status(modified, added, removed, deleted, unknown,
162 ignored, clean)
161 ignored, clean)
163
162
164 @propertycache
163 @propertycache
165 def substate(self):
164 def substate(self):
166 return subrepoutil.state(self, self._repo.ui)
165 return subrepoutil.state(self, self._repo.ui)
167
166
168 def subrev(self, subpath):
167 def subrev(self, subpath):
169 return self.substate[subpath][1]
168 return self.substate[subpath][1]
170
169
171 def rev(self):
170 def rev(self):
172 return self._rev
171 return self._rev
173 def node(self):
172 def node(self):
174 return self._node
173 return self._node
175 def hex(self):
174 def hex(self):
176 return hex(self.node())
175 return hex(self.node())
177 def manifest(self):
176 def manifest(self):
178 return self._manifest
177 return self._manifest
179 def manifestctx(self):
178 def manifestctx(self):
180 return self._manifestctx
179 return self._manifestctx
181 def repo(self):
180 def repo(self):
182 return self._repo
181 return self._repo
183 def phasestr(self):
182 def phasestr(self):
184 return phases.phasenames[self.phase()]
183 return phases.phasenames[self.phase()]
185 def mutable(self):
184 def mutable(self):
186 return self.phase() > phases.public
185 return self.phase() > phases.public
187
186
188 def getfileset(self, expr):
187 def getfileset(self, expr):
189 return fileset.getfileset(self, expr)
188 return fileset.getfileset(self, expr)
190
189
191 def obsolete(self):
190 def obsolete(self):
192 """True if the changeset is obsolete"""
191 """True if the changeset is obsolete"""
193 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
192 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
194
193
195 def extinct(self):
194 def extinct(self):
196 """True if the changeset is extinct"""
195 """True if the changeset is extinct"""
197 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
196 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
198
197
199 def orphan(self):
198 def orphan(self):
200 """True if the changeset is not obsolete but it's ancestor are"""
199 """True if the changeset is not obsolete but it's ancestor are"""
201 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
200 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
202
201
203 def phasedivergent(self):
202 def phasedivergent(self):
204 """True if the changeset try to be a successor of a public changeset
203 """True if the changeset try to be a successor of a public changeset
205
204
206 Only non-public and non-obsolete changesets may be bumped.
205 Only non-public and non-obsolete changesets may be bumped.
207 """
206 """
208 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
207 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
209
208
210 def contentdivergent(self):
209 def contentdivergent(self):
211 """Is a successors of a changeset with multiple possible successors set
210 """Is a successors of a changeset with multiple possible successors set
212
211
213 Only non-public and non-obsolete changesets may be divergent.
212 Only non-public and non-obsolete changesets may be divergent.
214 """
213 """
215 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
214 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
216
215
217 def isunstable(self):
216 def isunstable(self):
218 """True if the changeset is either unstable, bumped or divergent"""
217 """True if the changeset is either unstable, bumped or divergent"""
219 return self.orphan() or self.phasedivergent() or self.contentdivergent()
218 return self.orphan() or self.phasedivergent() or self.contentdivergent()
220
219
221 def instabilities(self):
220 def instabilities(self):
222 """return the list of instabilities affecting this changeset.
221 """return the list of instabilities affecting this changeset.
223
222
224 Instabilities are returned as strings. possible values are:
223 Instabilities are returned as strings. possible values are:
225 - orphan,
224 - orphan,
226 - phase-divergent,
225 - phase-divergent,
227 - content-divergent.
226 - content-divergent.
228 """
227 """
229 instabilities = []
228 instabilities = []
230 if self.orphan():
229 if self.orphan():
231 instabilities.append('orphan')
230 instabilities.append('orphan')
232 if self.phasedivergent():
231 if self.phasedivergent():
233 instabilities.append('phase-divergent')
232 instabilities.append('phase-divergent')
234 if self.contentdivergent():
233 if self.contentdivergent():
235 instabilities.append('content-divergent')
234 instabilities.append('content-divergent')
236 return instabilities
235 return instabilities
237
236
238 def parents(self):
237 def parents(self):
239 """return contexts for each parent changeset"""
238 """return contexts for each parent changeset"""
240 return self._parents
239 return self._parents
241
240
242 def p1(self):
241 def p1(self):
243 return self._parents[0]
242 return self._parents[0]
244
243
245 def p2(self):
244 def p2(self):
246 parents = self._parents
245 parents = self._parents
247 if len(parents) == 2:
246 if len(parents) == 2:
248 return parents[1]
247 return parents[1]
249 return changectx(self._repo, nullrev)
248 return changectx(self._repo, nullrev)
250
249
251 def _fileinfo(self, path):
250 def _fileinfo(self, path):
252 if r'_manifest' in self.__dict__:
251 if r'_manifest' in self.__dict__:
253 try:
252 try:
254 return self._manifest[path], self._manifest.flags(path)
253 return self._manifest[path], self._manifest.flags(path)
255 except KeyError:
254 except KeyError:
256 raise error.ManifestLookupError(self._node, path,
255 raise error.ManifestLookupError(self._node, path,
257 _('not found in manifest'))
256 _('not found in manifest'))
258 if r'_manifestdelta' in self.__dict__ or path in self.files():
257 if r'_manifestdelta' in self.__dict__ or path in self.files():
259 if path in self._manifestdelta:
258 if path in self._manifestdelta:
260 return (self._manifestdelta[path],
259 return (self._manifestdelta[path],
261 self._manifestdelta.flags(path))
260 self._manifestdelta.flags(path))
262 mfl = self._repo.manifestlog
261 mfl = self._repo.manifestlog
263 try:
262 try:
264 node, flag = mfl[self._changeset.manifest].find(path)
263 node, flag = mfl[self._changeset.manifest].find(path)
265 except KeyError:
264 except KeyError:
266 raise error.ManifestLookupError(self._node, path,
265 raise error.ManifestLookupError(self._node, path,
267 _('not found in manifest'))
266 _('not found in manifest'))
268
267
269 return node, flag
268 return node, flag
270
269
271 def filenode(self, path):
270 def filenode(self, path):
272 return self._fileinfo(path)[0]
271 return self._fileinfo(path)[0]
273
272
274 def flags(self, path):
273 def flags(self, path):
275 try:
274 try:
276 return self._fileinfo(path)[1]
275 return self._fileinfo(path)[1]
277 except error.LookupError:
276 except error.LookupError:
278 return ''
277 return ''
279
278
280 def sub(self, path, allowcreate=True):
279 def sub(self, path, allowcreate=True):
281 '''return a subrepo for the stored revision of path, never wdir()'''
280 '''return a subrepo for the stored revision of path, never wdir()'''
282 return subrepo.subrepo(self, path, allowcreate=allowcreate)
281 return subrepo.subrepo(self, path, allowcreate=allowcreate)
283
282
284 def nullsub(self, path, pctx):
283 def nullsub(self, path, pctx):
285 return subrepo.nullsubrepo(self, path, pctx)
284 return subrepo.nullsubrepo(self, path, pctx)
286
285
287 def workingsub(self, path):
286 def workingsub(self, path):
288 '''return a subrepo for the stored revision, or wdir if this is a wdir
287 '''return a subrepo for the stored revision, or wdir if this is a wdir
289 context.
288 context.
290 '''
289 '''
291 return subrepo.subrepo(self, path, allowwdir=True)
290 return subrepo.subrepo(self, path, allowwdir=True)
292
291
293 def match(self, pats=None, include=None, exclude=None, default='glob',
292 def match(self, pats=None, include=None, exclude=None, default='glob',
294 listsubrepos=False, badfn=None):
293 listsubrepos=False, badfn=None):
295 r = self._repo
294 r = self._repo
296 return matchmod.match(r.root, r.getcwd(), pats,
295 return matchmod.match(r.root, r.getcwd(), pats,
297 include, exclude, default,
296 include, exclude, default,
298 auditor=r.nofsauditor, ctx=self,
297 auditor=r.nofsauditor, ctx=self,
299 listsubrepos=listsubrepos, badfn=badfn)
298 listsubrepos=listsubrepos, badfn=badfn)
300
299
301 def diff(self, ctx2=None, match=None, **opts):
300 def diff(self, ctx2=None, match=None, **opts):
302 """Returns a diff generator for the given contexts and matcher"""
301 """Returns a diff generator for the given contexts and matcher"""
303 if ctx2 is None:
302 if ctx2 is None:
304 ctx2 = self.p1()
303 ctx2 = self.p1()
305 if ctx2 is not None:
304 if ctx2 is not None:
306 ctx2 = self._repo[ctx2]
305 ctx2 = self._repo[ctx2]
307 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
306 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
308 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
307 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
309
308
310 def dirs(self):
309 def dirs(self):
311 return self._manifest.dirs()
310 return self._manifest.dirs()
312
311
313 def hasdir(self, dir):
312 def hasdir(self, dir):
314 return self._manifest.hasdir(dir)
313 return self._manifest.hasdir(dir)
315
314
316 def status(self, other=None, match=None, listignored=False,
315 def status(self, other=None, match=None, listignored=False,
317 listclean=False, listunknown=False, listsubrepos=False):
316 listclean=False, listunknown=False, listsubrepos=False):
318 """return status of files between two nodes or node and working
317 """return status of files between two nodes or node and working
319 directory.
318 directory.
320
319
321 If other is None, compare this node with working directory.
320 If other is None, compare this node with working directory.
322
321
323 returns (modified, added, removed, deleted, unknown, ignored, clean)
322 returns (modified, added, removed, deleted, unknown, ignored, clean)
324 """
323 """
325
324
326 ctx1 = self
325 ctx1 = self
327 ctx2 = self._repo[other]
326 ctx2 = self._repo[other]
328
327
329 # This next code block is, admittedly, fragile logic that tests for
328 # This next code block is, admittedly, fragile logic that tests for
330 # reversing the contexts and wouldn't need to exist if it weren't for
329 # reversing the contexts and wouldn't need to exist if it weren't for
331 # the fast (and common) code path of comparing the working directory
330 # the fast (and common) code path of comparing the working directory
332 # with its first parent.
331 # with its first parent.
333 #
332 #
334 # What we're aiming for here is the ability to call:
333 # What we're aiming for here is the ability to call:
335 #
334 #
336 # workingctx.status(parentctx)
335 # workingctx.status(parentctx)
337 #
336 #
338 # If we always built the manifest for each context and compared those,
337 # If we always built the manifest for each context and compared those,
339 # then we'd be done. But the special case of the above call means we
338 # then we'd be done. But the special case of the above call means we
340 # just copy the manifest of the parent.
339 # just copy the manifest of the parent.
341 reversed = False
340 reversed = False
342 if (not isinstance(ctx1, changectx)
341 if (not isinstance(ctx1, changectx)
343 and isinstance(ctx2, changectx)):
342 and isinstance(ctx2, changectx)):
344 reversed = True
343 reversed = True
345 ctx1, ctx2 = ctx2, ctx1
344 ctx1, ctx2 = ctx2, ctx1
346
345
347 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
346 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
348 match = ctx2._matchstatus(ctx1, match)
347 match = ctx2._matchstatus(ctx1, match)
349 r = scmutil.status([], [], [], [], [], [], [])
348 r = scmutil.status([], [], [], [], [], [], [])
350 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
349 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
351 listunknown)
350 listunknown)
352
351
353 if reversed:
352 if reversed:
354 # Reverse added and removed. Clear deleted, unknown and ignored as
353 # Reverse added and removed. Clear deleted, unknown and ignored as
355 # these make no sense to reverse.
354 # these make no sense to reverse.
356 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
355 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
357 r.clean)
356 r.clean)
358
357
359 if listsubrepos:
358 if listsubrepos:
360 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
359 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
361 try:
360 try:
362 rev2 = ctx2.subrev(subpath)
361 rev2 = ctx2.subrev(subpath)
363 except KeyError:
362 except KeyError:
364 # A subrepo that existed in node1 was deleted between
363 # A subrepo that existed in node1 was deleted between
365 # node1 and node2 (inclusive). Thus, ctx2's substate
364 # node1 and node2 (inclusive). Thus, ctx2's substate
366 # won't contain that subpath. The best we can do ignore it.
365 # won't contain that subpath. The best we can do ignore it.
367 rev2 = None
366 rev2 = None
368 submatch = matchmod.subdirmatcher(subpath, match)
367 submatch = matchmod.subdirmatcher(subpath, match)
369 s = sub.status(rev2, match=submatch, ignored=listignored,
368 s = sub.status(rev2, match=submatch, ignored=listignored,
370 clean=listclean, unknown=listunknown,
369 clean=listclean, unknown=listunknown,
371 listsubrepos=True)
370 listsubrepos=True)
372 for rfiles, sfiles in zip(r, s):
371 for rfiles, sfiles in zip(r, s):
373 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
372 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
374
373
375 for l in r:
374 for l in r:
376 l.sort()
375 l.sort()
377
376
378 return r
377 return r
379
378
380 def changectxdeprecwarn(repo):
381 # changectx's constructor will soon lose support for these forms of
382 # changeids:
383 # * stringinfied ints
384 # * bookmarks, tags, branches, and other namespace identifiers
385 # * hex nodeid prefixes
386 #
387 # Depending on your use case, replace repo[x] by one of these:
388 # * If you want to support general revsets, use scmutil.revsingle(x)
389 # * If you know that "x" is a stringified int, use repo[int(x)]
390 # * If you know that "x" is a bookmark, use repo._bookmarks.changectx(x)
391 # * If you know that "x" is a tag, use repo[repo.tags()[x]]
392 # * If you know that "x" is a branch or in some other namespace,
393 # use the appropriate mechanism for that namespace
394 # * If you know that "x" is a hex nodeid prefix, use
395 # repo[scmutil.resolvehexnodeidprefix(repo, x)]
396 # * If "x" is a string that can be any of the above, but you don't want
397 # to allow general revsets (perhaps because "x" may come from a remote
398 # user and the revset may be too costly), use scmutil.revsymbol(repo, x)
399 # * If "x" can be a mix of the above, you'll have to figure it out
400 # yourself
401 repo.ui.deprecwarn("changectx.__init__ is getting more limited, see "
402 "context.changectxdeprecwarn() for details", "4.6",
403 stacklevel=4)
404
405 class changectx(basectx):
379 class changectx(basectx):
406 """A changecontext object makes access to data related to a particular
380 """A changecontext object makes access to data related to a particular
407 changeset convenient. It represents a read-only context already present in
381 changeset convenient. It represents a read-only context already present in
408 the repo."""
382 the repo."""
409 def __init__(self, repo, changeid='.'):
383 def __init__(self, repo, changeid='.'):
410 """changeid is a revision number, node, or tag"""
384 """changeid is a revision number, node, or tag"""
411 super(changectx, self).__init__(repo)
385 super(changectx, self).__init__(repo)
412
386
413 try:
387 try:
414 if isinstance(changeid, int):
388 if isinstance(changeid, int):
415 self._node = repo.changelog.node(changeid)
389 self._node = repo.changelog.node(changeid)
416 self._rev = changeid
390 self._rev = changeid
417 return
391 return
418 if changeid == 'null':
392 if changeid == 'null':
419 self._node = nullid
393 self._node = nullid
420 self._rev = nullrev
394 self._rev = nullrev
421 return
395 return
422 if changeid == 'tip':
396 if changeid == 'tip':
423 self._node = repo.changelog.tip()
397 self._node = repo.changelog.tip()
424 self._rev = repo.changelog.rev(self._node)
398 self._rev = repo.changelog.rev(self._node)
425 return
399 return
426 if (changeid == '.'
400 if (changeid == '.'
427 or repo.local() and changeid == repo.dirstate.p1()):
401 or repo.local() and changeid == repo.dirstate.p1()):
428 # this is a hack to delay/avoid loading obsmarkers
402 # this is a hack to delay/avoid loading obsmarkers
429 # when we know that '.' won't be hidden
403 # when we know that '.' won't be hidden
430 self._node = repo.dirstate.p1()
404 self._node = repo.dirstate.p1()
431 self._rev = repo.unfiltered().changelog.rev(self._node)
405 self._rev = repo.unfiltered().changelog.rev(self._node)
432 return
406 return
433 if len(changeid) == 20:
407 if len(changeid) == 20:
434 try:
408 try:
435 self._node = changeid
409 self._node = changeid
436 self._rev = repo.changelog.rev(changeid)
410 self._rev = repo.changelog.rev(changeid)
437 return
411 return
438 except error.FilteredLookupError:
412 except error.FilteredLookupError:
439 raise
413 raise
440 except LookupError:
414 except LookupError:
441 pass
415 pass
442
416
443 try:
444 r = int(changeid)
445 if '%d' % r != changeid:
446 raise ValueError
447 l = len(repo.changelog)
448 if r < 0:
449 r += l
450 if r < 0 or r >= l and r != wdirrev:
451 raise ValueError
452 self._rev = r
453 self._node = repo.changelog.node(r)
454 changectxdeprecwarn(repo)
455 return
456 except error.FilteredIndexError:
457 raise
458 except (ValueError, OverflowError, IndexError):
459 pass
460
461 if len(changeid) == 40:
417 if len(changeid) == 40:
462 try:
418 try:
463 self._node = bin(changeid)
419 self._node = bin(changeid)
464 self._rev = repo.changelog.rev(self._node)
420 self._rev = repo.changelog.rev(self._node)
465 return
421 return
466 except error.FilteredLookupError:
422 except error.FilteredLookupError:
467 raise
423 raise
468 except (TypeError, LookupError):
424 except (TypeError, LookupError):
469 pass
425 pass
470
426
471 # lookup bookmarks through the name interface
472 try:
473 self._node = repo.names.singlenode(repo, changeid)
474 self._rev = repo.changelog.rev(self._node)
475 changectxdeprecwarn(repo)
476 return
477 except KeyError:
478 pass
479
480 self._node = scmutil.resolvehexnodeidprefix(repo, changeid)
481 if self._node is not None:
482 self._rev = repo.changelog.rev(self._node)
483 changectxdeprecwarn(repo)
484 return
485
486 # lookup failed
427 # lookup failed
487 # check if it might have come from damaged dirstate
428 # check if it might have come from damaged dirstate
488 #
429 #
489 # XXX we could avoid the unfiltered if we had a recognizable
430 # XXX we could avoid the unfiltered if we had a recognizable
490 # exception for filtered changeset access
431 # exception for filtered changeset access
491 if (repo.local()
432 if (repo.local()
492 and changeid in repo.unfiltered().dirstate.parents()):
433 and changeid in repo.unfiltered().dirstate.parents()):
493 msg = _("working directory has unknown parent '%s'!")
434 msg = _("working directory has unknown parent '%s'!")
494 raise error.Abort(msg % short(changeid))
435 raise error.Abort(msg % short(changeid))
495 try:
436 try:
496 if len(changeid) == 20 and nonascii(changeid):
437 if len(changeid) == 20 and nonascii(changeid):
497 changeid = hex(changeid)
438 changeid = hex(changeid)
498 except TypeError:
439 except TypeError:
499 pass
440 pass
500 except (error.FilteredIndexError, error.FilteredLookupError):
441 except (error.FilteredIndexError, error.FilteredLookupError):
501 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
442 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
502 % changeid)
443 % changeid)
503 except error.FilteredRepoLookupError:
444 except error.FilteredRepoLookupError:
504 raise
445 raise
505 except IndexError:
446 except IndexError:
506 pass
447 pass
507 raise error.RepoLookupError(
448 raise error.RepoLookupError(
508 _("unknown revision '%s'") % changeid)
449 _("unknown revision '%s'") % changeid)
509
450
510 def __hash__(self):
451 def __hash__(self):
511 try:
452 try:
512 return hash(self._rev)
453 return hash(self._rev)
513 except AttributeError:
454 except AttributeError:
514 return id(self)
455 return id(self)
515
456
516 def __nonzero__(self):
457 def __nonzero__(self):
517 return self._rev != nullrev
458 return self._rev != nullrev
518
459
519 __bool__ = __nonzero__
460 __bool__ = __nonzero__
520
461
521 @propertycache
462 @propertycache
522 def _changeset(self):
463 def _changeset(self):
523 return self._repo.changelog.changelogrevision(self.rev())
464 return self._repo.changelog.changelogrevision(self.rev())
524
465
525 @propertycache
466 @propertycache
526 def _manifest(self):
467 def _manifest(self):
527 return self._manifestctx.read()
468 return self._manifestctx.read()
528
469
529 @property
470 @property
530 def _manifestctx(self):
471 def _manifestctx(self):
531 return self._repo.manifestlog[self._changeset.manifest]
472 return self._repo.manifestlog[self._changeset.manifest]
532
473
533 @propertycache
474 @propertycache
534 def _manifestdelta(self):
475 def _manifestdelta(self):
535 return self._manifestctx.readdelta()
476 return self._manifestctx.readdelta()
536
477
537 @propertycache
478 @propertycache
538 def _parents(self):
479 def _parents(self):
539 repo = self._repo
480 repo = self._repo
540 p1, p2 = repo.changelog.parentrevs(self._rev)
481 p1, p2 = repo.changelog.parentrevs(self._rev)
541 if p2 == nullrev:
482 if p2 == nullrev:
542 return [changectx(repo, p1)]
483 return [changectx(repo, p1)]
543 return [changectx(repo, p1), changectx(repo, p2)]
484 return [changectx(repo, p1), changectx(repo, p2)]
544
485
545 def changeset(self):
486 def changeset(self):
546 c = self._changeset
487 c = self._changeset
547 return (
488 return (
548 c.manifest,
489 c.manifest,
549 c.user,
490 c.user,
550 c.date,
491 c.date,
551 c.files,
492 c.files,
552 c.description,
493 c.description,
553 c.extra,
494 c.extra,
554 )
495 )
555 def manifestnode(self):
496 def manifestnode(self):
556 return self._changeset.manifest
497 return self._changeset.manifest
557
498
558 def user(self):
499 def user(self):
559 return self._changeset.user
500 return self._changeset.user
560 def date(self):
501 def date(self):
561 return self._changeset.date
502 return self._changeset.date
562 def files(self):
503 def files(self):
563 return self._changeset.files
504 return self._changeset.files
564 def description(self):
505 def description(self):
565 return self._changeset.description
506 return self._changeset.description
566 def branch(self):
507 def branch(self):
567 return encoding.tolocal(self._changeset.extra.get("branch"))
508 return encoding.tolocal(self._changeset.extra.get("branch"))
568 def closesbranch(self):
509 def closesbranch(self):
569 return 'close' in self._changeset.extra
510 return 'close' in self._changeset.extra
570 def extra(self):
511 def extra(self):
571 """Return a dict of extra information."""
512 """Return a dict of extra information."""
572 return self._changeset.extra
513 return self._changeset.extra
573 def tags(self):
514 def tags(self):
574 """Return a list of byte tag names"""
515 """Return a list of byte tag names"""
575 return self._repo.nodetags(self._node)
516 return self._repo.nodetags(self._node)
576 def bookmarks(self):
517 def bookmarks(self):
577 """Return a list of byte bookmark names."""
518 """Return a list of byte bookmark names."""
578 return self._repo.nodebookmarks(self._node)
519 return self._repo.nodebookmarks(self._node)
579 def phase(self):
520 def phase(self):
580 return self._repo._phasecache.phase(self._repo, self._rev)
521 return self._repo._phasecache.phase(self._repo, self._rev)
581 def hidden(self):
522 def hidden(self):
582 return self._rev in repoview.filterrevs(self._repo, 'visible')
523 return self._rev in repoview.filterrevs(self._repo, 'visible')
583
524
584 def isinmemory(self):
525 def isinmemory(self):
585 return False
526 return False
586
527
587 def children(self):
528 def children(self):
588 """return list of changectx contexts for each child changeset.
529 """return list of changectx contexts for each child changeset.
589
530
590 This returns only the immediate child changesets. Use descendants() to
531 This returns only the immediate child changesets. Use descendants() to
591 recursively walk children.
532 recursively walk children.
592 """
533 """
593 c = self._repo.changelog.children(self._node)
534 c = self._repo.changelog.children(self._node)
594 return [changectx(self._repo, x) for x in c]
535 return [changectx(self._repo, x) for x in c]
595
536
596 def ancestors(self):
537 def ancestors(self):
597 for a in self._repo.changelog.ancestors([self._rev]):
538 for a in self._repo.changelog.ancestors([self._rev]):
598 yield changectx(self._repo, a)
539 yield changectx(self._repo, a)
599
540
600 def descendants(self):
541 def descendants(self):
601 """Recursively yield all children of the changeset.
542 """Recursively yield all children of the changeset.
602
543
603 For just the immediate children, use children()
544 For just the immediate children, use children()
604 """
545 """
605 for d in self._repo.changelog.descendants([self._rev]):
546 for d in self._repo.changelog.descendants([self._rev]):
606 yield changectx(self._repo, d)
547 yield changectx(self._repo, d)
607
548
608 def filectx(self, path, fileid=None, filelog=None):
549 def filectx(self, path, fileid=None, filelog=None):
609 """get a file context from this changeset"""
550 """get a file context from this changeset"""
610 if fileid is None:
551 if fileid is None:
611 fileid = self.filenode(path)
552 fileid = self.filenode(path)
612 return filectx(self._repo, path, fileid=fileid,
553 return filectx(self._repo, path, fileid=fileid,
613 changectx=self, filelog=filelog)
554 changectx=self, filelog=filelog)
614
555
615 def ancestor(self, c2, warn=False):
556 def ancestor(self, c2, warn=False):
616 """return the "best" ancestor context of self and c2
557 """return the "best" ancestor context of self and c2
617
558
618 If there are multiple candidates, it will show a message and check
559 If there are multiple candidates, it will show a message and check
619 merge.preferancestor configuration before falling back to the
560 merge.preferancestor configuration before falling back to the
620 revlog ancestor."""
561 revlog ancestor."""
621 # deal with workingctxs
562 # deal with workingctxs
622 n2 = c2._node
563 n2 = c2._node
623 if n2 is None:
564 if n2 is None:
624 n2 = c2._parents[0]._node
565 n2 = c2._parents[0]._node
625 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
566 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
626 if not cahs:
567 if not cahs:
627 anc = nullid
568 anc = nullid
628 elif len(cahs) == 1:
569 elif len(cahs) == 1:
629 anc = cahs[0]
570 anc = cahs[0]
630 else:
571 else:
631 # experimental config: merge.preferancestor
572 # experimental config: merge.preferancestor
632 for r in self._repo.ui.configlist('merge', 'preferancestor'):
573 for r in self._repo.ui.configlist('merge', 'preferancestor'):
633 try:
574 try:
634 ctx = scmutil.revsymbol(self._repo, r)
575 ctx = scmutil.revsymbol(self._repo, r)
635 except error.RepoLookupError:
576 except error.RepoLookupError:
636 continue
577 continue
637 anc = ctx.node()
578 anc = ctx.node()
638 if anc in cahs:
579 if anc in cahs:
639 break
580 break
640 else:
581 else:
641 anc = self._repo.changelog.ancestor(self._node, n2)
582 anc = self._repo.changelog.ancestor(self._node, n2)
642 if warn:
583 if warn:
643 self._repo.ui.status(
584 self._repo.ui.status(
644 (_("note: using %s as ancestor of %s and %s\n") %
585 (_("note: using %s as ancestor of %s and %s\n") %
645 (short(anc), short(self._node), short(n2))) +
586 (short(anc), short(self._node), short(n2))) +
646 ''.join(_(" alternatively, use --config "
587 ''.join(_(" alternatively, use --config "
647 "merge.preferancestor=%s\n") %
588 "merge.preferancestor=%s\n") %
648 short(n) for n in sorted(cahs) if n != anc))
589 short(n) for n in sorted(cahs) if n != anc))
649 return changectx(self._repo, anc)
590 return changectx(self._repo, anc)
650
591
651 def descendant(self, other):
592 def descendant(self, other):
652 """True if other is descendant of this changeset"""
593 """True if other is descendant of this changeset"""
653 return self._repo.changelog.descendant(self._rev, other._rev)
594 return self._repo.changelog.descendant(self._rev, other._rev)
654
595
655 def walk(self, match):
596 def walk(self, match):
656 '''Generates matching file names.'''
597 '''Generates matching file names.'''
657
598
658 # Wrap match.bad method to have message with nodeid
599 # Wrap match.bad method to have message with nodeid
659 def bad(fn, msg):
600 def bad(fn, msg):
660 # The manifest doesn't know about subrepos, so don't complain about
601 # The manifest doesn't know about subrepos, so don't complain about
661 # paths into valid subrepos.
602 # paths into valid subrepos.
662 if any(fn == s or fn.startswith(s + '/')
603 if any(fn == s or fn.startswith(s + '/')
663 for s in self.substate):
604 for s in self.substate):
664 return
605 return
665 match.bad(fn, _('no such file in rev %s') % self)
606 match.bad(fn, _('no such file in rev %s') % self)
666
607
667 m = matchmod.badmatch(match, bad)
608 m = matchmod.badmatch(match, bad)
668 return self._manifest.walk(m)
609 return self._manifest.walk(m)
669
610
670 def matches(self, match):
611 def matches(self, match):
671 return self.walk(match)
612 return self.walk(match)
672
613
673 class basefilectx(object):
614 class basefilectx(object):
674 """A filecontext object represents the common logic for its children:
615 """A filecontext object represents the common logic for its children:
675 filectx: read-only access to a filerevision that is already present
616 filectx: read-only access to a filerevision that is already present
676 in the repo,
617 in the repo,
677 workingfilectx: a filecontext that represents files from the working
618 workingfilectx: a filecontext that represents files from the working
678 directory,
619 directory,
679 memfilectx: a filecontext that represents files in-memory,
620 memfilectx: a filecontext that represents files in-memory,
680 overlayfilectx: duplicate another filecontext with some fields overridden.
621 overlayfilectx: duplicate another filecontext with some fields overridden.
681 """
622 """
682 @propertycache
623 @propertycache
683 def _filelog(self):
624 def _filelog(self):
684 return self._repo.file(self._path)
625 return self._repo.file(self._path)
685
626
686 @propertycache
627 @propertycache
687 def _changeid(self):
628 def _changeid(self):
688 if r'_changeid' in self.__dict__:
629 if r'_changeid' in self.__dict__:
689 return self._changeid
630 return self._changeid
690 elif r'_changectx' in self.__dict__:
631 elif r'_changectx' in self.__dict__:
691 return self._changectx.rev()
632 return self._changectx.rev()
692 elif r'_descendantrev' in self.__dict__:
633 elif r'_descendantrev' in self.__dict__:
693 # this file context was created from a revision with a known
634 # this file context was created from a revision with a known
694 # descendant, we can (lazily) correct for linkrev aliases
635 # descendant, we can (lazily) correct for linkrev aliases
695 return self._adjustlinkrev(self._descendantrev)
636 return self._adjustlinkrev(self._descendantrev)
696 else:
637 else:
697 return self._filelog.linkrev(self._filerev)
638 return self._filelog.linkrev(self._filerev)
698
639
699 @propertycache
640 @propertycache
700 def _filenode(self):
641 def _filenode(self):
701 if r'_fileid' in self.__dict__:
642 if r'_fileid' in self.__dict__:
702 return self._filelog.lookup(self._fileid)
643 return self._filelog.lookup(self._fileid)
703 else:
644 else:
704 return self._changectx.filenode(self._path)
645 return self._changectx.filenode(self._path)
705
646
706 @propertycache
647 @propertycache
707 def _filerev(self):
648 def _filerev(self):
708 return self._filelog.rev(self._filenode)
649 return self._filelog.rev(self._filenode)
709
650
710 @propertycache
651 @propertycache
711 def _repopath(self):
652 def _repopath(self):
712 return self._path
653 return self._path
713
654
714 def __nonzero__(self):
655 def __nonzero__(self):
715 try:
656 try:
716 self._filenode
657 self._filenode
717 return True
658 return True
718 except error.LookupError:
659 except error.LookupError:
719 # file is missing
660 # file is missing
720 return False
661 return False
721
662
722 __bool__ = __nonzero__
663 __bool__ = __nonzero__
723
664
724 def __bytes__(self):
665 def __bytes__(self):
725 try:
666 try:
726 return "%s@%s" % (self.path(), self._changectx)
667 return "%s@%s" % (self.path(), self._changectx)
727 except error.LookupError:
668 except error.LookupError:
728 return "%s@???" % self.path()
669 return "%s@???" % self.path()
729
670
730 __str__ = encoding.strmethod(__bytes__)
671 __str__ = encoding.strmethod(__bytes__)
731
672
732 def __repr__(self):
673 def __repr__(self):
733 return r"<%s %s>" % (type(self).__name__, str(self))
674 return r"<%s %s>" % (type(self).__name__, str(self))
734
675
735 def __hash__(self):
676 def __hash__(self):
736 try:
677 try:
737 return hash((self._path, self._filenode))
678 return hash((self._path, self._filenode))
738 except AttributeError:
679 except AttributeError:
739 return id(self)
680 return id(self)
740
681
741 def __eq__(self, other):
682 def __eq__(self, other):
742 try:
683 try:
743 return (type(self) == type(other) and self._path == other._path
684 return (type(self) == type(other) and self._path == other._path
744 and self._filenode == other._filenode)
685 and self._filenode == other._filenode)
745 except AttributeError:
686 except AttributeError:
746 return False
687 return False
747
688
748 def __ne__(self, other):
689 def __ne__(self, other):
749 return not (self == other)
690 return not (self == other)
750
691
751 def filerev(self):
692 def filerev(self):
752 return self._filerev
693 return self._filerev
753 def filenode(self):
694 def filenode(self):
754 return self._filenode
695 return self._filenode
755 @propertycache
696 @propertycache
756 def _flags(self):
697 def _flags(self):
757 return self._changectx.flags(self._path)
698 return self._changectx.flags(self._path)
758 def flags(self):
699 def flags(self):
759 return self._flags
700 return self._flags
760 def filelog(self):
701 def filelog(self):
761 return self._filelog
702 return self._filelog
762 def rev(self):
703 def rev(self):
763 return self._changeid
704 return self._changeid
764 def linkrev(self):
705 def linkrev(self):
765 return self._filelog.linkrev(self._filerev)
706 return self._filelog.linkrev(self._filerev)
766 def node(self):
707 def node(self):
767 return self._changectx.node()
708 return self._changectx.node()
768 def hex(self):
709 def hex(self):
769 return self._changectx.hex()
710 return self._changectx.hex()
770 def user(self):
711 def user(self):
771 return self._changectx.user()
712 return self._changectx.user()
772 def date(self):
713 def date(self):
773 return self._changectx.date()
714 return self._changectx.date()
774 def files(self):
715 def files(self):
775 return self._changectx.files()
716 return self._changectx.files()
776 def description(self):
717 def description(self):
777 return self._changectx.description()
718 return self._changectx.description()
778 def branch(self):
719 def branch(self):
779 return self._changectx.branch()
720 return self._changectx.branch()
780 def extra(self):
721 def extra(self):
781 return self._changectx.extra()
722 return self._changectx.extra()
782 def phase(self):
723 def phase(self):
783 return self._changectx.phase()
724 return self._changectx.phase()
784 def phasestr(self):
725 def phasestr(self):
785 return self._changectx.phasestr()
726 return self._changectx.phasestr()
786 def obsolete(self):
727 def obsolete(self):
787 return self._changectx.obsolete()
728 return self._changectx.obsolete()
788 def instabilities(self):
729 def instabilities(self):
789 return self._changectx.instabilities()
730 return self._changectx.instabilities()
790 def manifest(self):
731 def manifest(self):
791 return self._changectx.manifest()
732 return self._changectx.manifest()
792 def changectx(self):
733 def changectx(self):
793 return self._changectx
734 return self._changectx
794 def renamed(self):
735 def renamed(self):
795 return self._copied
736 return self._copied
796 def repo(self):
737 def repo(self):
797 return self._repo
738 return self._repo
798 def size(self):
739 def size(self):
799 return len(self.data())
740 return len(self.data())
800
741
801 def path(self):
742 def path(self):
802 return self._path
743 return self._path
803
744
804 def isbinary(self):
745 def isbinary(self):
805 try:
746 try:
806 return stringutil.binary(self.data())
747 return stringutil.binary(self.data())
807 except IOError:
748 except IOError:
808 return False
749 return False
809 def isexec(self):
750 def isexec(self):
810 return 'x' in self.flags()
751 return 'x' in self.flags()
811 def islink(self):
752 def islink(self):
812 return 'l' in self.flags()
753 return 'l' in self.flags()
813
754
814 def isabsent(self):
755 def isabsent(self):
815 """whether this filectx represents a file not in self._changectx
756 """whether this filectx represents a file not in self._changectx
816
757
817 This is mainly for merge code to detect change/delete conflicts. This is
758 This is mainly for merge code to detect change/delete conflicts. This is
818 expected to be True for all subclasses of basectx."""
759 expected to be True for all subclasses of basectx."""
819 return False
760 return False
820
761
821 _customcmp = False
762 _customcmp = False
822 def cmp(self, fctx):
763 def cmp(self, fctx):
823 """compare with other file context
764 """compare with other file context
824
765
825 returns True if different than fctx.
766 returns True if different than fctx.
826 """
767 """
827 if fctx._customcmp:
768 if fctx._customcmp:
828 return fctx.cmp(self)
769 return fctx.cmp(self)
829
770
830 if (fctx._filenode is None
771 if (fctx._filenode is None
831 and (self._repo._encodefilterpats
772 and (self._repo._encodefilterpats
832 # if file data starts with '\1\n', empty metadata block is
773 # if file data starts with '\1\n', empty metadata block is
833 # prepended, which adds 4 bytes to filelog.size().
774 # prepended, which adds 4 bytes to filelog.size().
834 or self.size() - 4 == fctx.size())
775 or self.size() - 4 == fctx.size())
835 or self.size() == fctx.size()):
776 or self.size() == fctx.size()):
836 return self._filelog.cmp(self._filenode, fctx.data())
777 return self._filelog.cmp(self._filenode, fctx.data())
837
778
838 return True
779 return True
839
780
840 def _adjustlinkrev(self, srcrev, inclusive=False):
781 def _adjustlinkrev(self, srcrev, inclusive=False):
841 """return the first ancestor of <srcrev> introducing <fnode>
782 """return the first ancestor of <srcrev> introducing <fnode>
842
783
843 If the linkrev of the file revision does not point to an ancestor of
784 If the linkrev of the file revision does not point to an ancestor of
844 srcrev, we'll walk down the ancestors until we find one introducing
785 srcrev, we'll walk down the ancestors until we find one introducing
845 this file revision.
786 this file revision.
846
787
847 :srcrev: the changeset revision we search ancestors from
788 :srcrev: the changeset revision we search ancestors from
848 :inclusive: if true, the src revision will also be checked
789 :inclusive: if true, the src revision will also be checked
849 """
790 """
850 repo = self._repo
791 repo = self._repo
851 cl = repo.unfiltered().changelog
792 cl = repo.unfiltered().changelog
852 mfl = repo.manifestlog
793 mfl = repo.manifestlog
853 # fetch the linkrev
794 # fetch the linkrev
854 lkr = self.linkrev()
795 lkr = self.linkrev()
855 # hack to reuse ancestor computation when searching for renames
796 # hack to reuse ancestor computation when searching for renames
856 memberanc = getattr(self, '_ancestrycontext', None)
797 memberanc = getattr(self, '_ancestrycontext', None)
857 iteranc = None
798 iteranc = None
858 if srcrev is None:
799 if srcrev is None:
859 # wctx case, used by workingfilectx during mergecopy
800 # wctx case, used by workingfilectx during mergecopy
860 revs = [p.rev() for p in self._repo[None].parents()]
801 revs = [p.rev() for p in self._repo[None].parents()]
861 inclusive = True # we skipped the real (revless) source
802 inclusive = True # we skipped the real (revless) source
862 else:
803 else:
863 revs = [srcrev]
804 revs = [srcrev]
864 if memberanc is None:
805 if memberanc is None:
865 memberanc = iteranc = cl.ancestors(revs, lkr,
806 memberanc = iteranc = cl.ancestors(revs, lkr,
866 inclusive=inclusive)
807 inclusive=inclusive)
867 # check if this linkrev is an ancestor of srcrev
808 # check if this linkrev is an ancestor of srcrev
868 if lkr not in memberanc:
809 if lkr not in memberanc:
869 if iteranc is None:
810 if iteranc is None:
870 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
811 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
871 fnode = self._filenode
812 fnode = self._filenode
872 path = self._path
813 path = self._path
873 for a in iteranc:
814 for a in iteranc:
874 ac = cl.read(a) # get changeset data (we avoid object creation)
815 ac = cl.read(a) # get changeset data (we avoid object creation)
875 if path in ac[3]: # checking the 'files' field.
816 if path in ac[3]: # checking the 'files' field.
876 # The file has been touched, check if the content is
817 # The file has been touched, check if the content is
877 # similar to the one we search for.
818 # similar to the one we search for.
878 if fnode == mfl[ac[0]].readfast().get(path):
819 if fnode == mfl[ac[0]].readfast().get(path):
879 return a
820 return a
880 # In theory, we should never get out of that loop without a result.
821 # In theory, we should never get out of that loop without a result.
881 # But if manifest uses a buggy file revision (not children of the
822 # But if manifest uses a buggy file revision (not children of the
882 # one it replaces) we could. Such a buggy situation will likely
823 # one it replaces) we could. Such a buggy situation will likely
883 # result is crash somewhere else at to some point.
824 # result is crash somewhere else at to some point.
884 return lkr
825 return lkr
885
826
886 def introrev(self):
827 def introrev(self):
887 """return the rev of the changeset which introduced this file revision
828 """return the rev of the changeset which introduced this file revision
888
829
889 This method is different from linkrev because it take into account the
830 This method is different from linkrev because it take into account the
890 changeset the filectx was created from. It ensures the returned
831 changeset the filectx was created from. It ensures the returned
891 revision is one of its ancestors. This prevents bugs from
832 revision is one of its ancestors. This prevents bugs from
892 'linkrev-shadowing' when a file revision is used by multiple
833 'linkrev-shadowing' when a file revision is used by multiple
893 changesets.
834 changesets.
894 """
835 """
895 lkr = self.linkrev()
836 lkr = self.linkrev()
896 attrs = vars(self)
837 attrs = vars(self)
897 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
838 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
898 if noctx or self.rev() == lkr:
839 if noctx or self.rev() == lkr:
899 return self.linkrev()
840 return self.linkrev()
900 return self._adjustlinkrev(self.rev(), inclusive=True)
841 return self._adjustlinkrev(self.rev(), inclusive=True)
901
842
902 def introfilectx(self):
843 def introfilectx(self):
903 """Return filectx having identical contents, but pointing to the
844 """Return filectx having identical contents, but pointing to the
904 changeset revision where this filectx was introduced"""
845 changeset revision where this filectx was introduced"""
905 introrev = self.introrev()
846 introrev = self.introrev()
906 if self.rev() == introrev:
847 if self.rev() == introrev:
907 return self
848 return self
908 return self.filectx(self.filenode(), changeid=introrev)
849 return self.filectx(self.filenode(), changeid=introrev)
909
850
910 def _parentfilectx(self, path, fileid, filelog):
851 def _parentfilectx(self, path, fileid, filelog):
911 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
852 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
912 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
853 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
913 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
854 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
914 # If self is associated with a changeset (probably explicitly
855 # If self is associated with a changeset (probably explicitly
915 # fed), ensure the created filectx is associated with a
856 # fed), ensure the created filectx is associated with a
916 # changeset that is an ancestor of self.changectx.
857 # changeset that is an ancestor of self.changectx.
917 # This lets us later use _adjustlinkrev to get a correct link.
858 # This lets us later use _adjustlinkrev to get a correct link.
918 fctx._descendantrev = self.rev()
859 fctx._descendantrev = self.rev()
919 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
860 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
920 elif r'_descendantrev' in vars(self):
861 elif r'_descendantrev' in vars(self):
921 # Otherwise propagate _descendantrev if we have one associated.
862 # Otherwise propagate _descendantrev if we have one associated.
922 fctx._descendantrev = self._descendantrev
863 fctx._descendantrev = self._descendantrev
923 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
864 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
924 return fctx
865 return fctx
925
866
926 def parents(self):
867 def parents(self):
927 _path = self._path
868 _path = self._path
928 fl = self._filelog
869 fl = self._filelog
929 parents = self._filelog.parents(self._filenode)
870 parents = self._filelog.parents(self._filenode)
930 pl = [(_path, node, fl) for node in parents if node != nullid]
871 pl = [(_path, node, fl) for node in parents if node != nullid]
931
872
932 r = fl.renamed(self._filenode)
873 r = fl.renamed(self._filenode)
933 if r:
874 if r:
934 # - In the simple rename case, both parent are nullid, pl is empty.
875 # - In the simple rename case, both parent are nullid, pl is empty.
935 # - In case of merge, only one of the parent is null id and should
876 # - In case of merge, only one of the parent is null id and should
936 # be replaced with the rename information. This parent is -always-
877 # be replaced with the rename information. This parent is -always-
937 # the first one.
878 # the first one.
938 #
879 #
939 # As null id have always been filtered out in the previous list
880 # As null id have always been filtered out in the previous list
940 # comprehension, inserting to 0 will always result in "replacing
881 # comprehension, inserting to 0 will always result in "replacing
941 # first nullid parent with rename information.
882 # first nullid parent with rename information.
942 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
883 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
943
884
944 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
885 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
945
886
946 def p1(self):
887 def p1(self):
947 return self.parents()[0]
888 return self.parents()[0]
948
889
949 def p2(self):
890 def p2(self):
950 p = self.parents()
891 p = self.parents()
951 if len(p) == 2:
892 if len(p) == 2:
952 return p[1]
893 return p[1]
953 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
894 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
954
895
955 def annotate(self, follow=False, skiprevs=None, diffopts=None):
896 def annotate(self, follow=False, skiprevs=None, diffopts=None):
956 """Returns a list of annotateline objects for each line in the file
897 """Returns a list of annotateline objects for each line in the file
957
898
958 - line.fctx is the filectx of the node where that line was last changed
899 - line.fctx is the filectx of the node where that line was last changed
959 - line.lineno is the line number at the first appearance in the managed
900 - line.lineno is the line number at the first appearance in the managed
960 file
901 file
961 - line.text is the data on that line (including newline character)
902 - line.text is the data on that line (including newline character)
962 """
903 """
963 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
904 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
964
905
965 def parents(f):
906 def parents(f):
966 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
907 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
967 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
908 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
968 # from the topmost introrev (= srcrev) down to p.linkrev() if it
909 # from the topmost introrev (= srcrev) down to p.linkrev() if it
969 # isn't an ancestor of the srcrev.
910 # isn't an ancestor of the srcrev.
970 f._changeid
911 f._changeid
971 pl = f.parents()
912 pl = f.parents()
972
913
973 # Don't return renamed parents if we aren't following.
914 # Don't return renamed parents if we aren't following.
974 if not follow:
915 if not follow:
975 pl = [p for p in pl if p.path() == f.path()]
916 pl = [p for p in pl if p.path() == f.path()]
976
917
977 # renamed filectx won't have a filelog yet, so set it
918 # renamed filectx won't have a filelog yet, so set it
978 # from the cache to save time
919 # from the cache to save time
979 for p in pl:
920 for p in pl:
980 if r'_filelog' not in p.__dict__:
921 if r'_filelog' not in p.__dict__:
981 p._filelog = getlog(p.path())
922 p._filelog = getlog(p.path())
982
923
983 return pl
924 return pl
984
925
985 # use linkrev to find the first changeset where self appeared
926 # use linkrev to find the first changeset where self appeared
986 base = self.introfilectx()
927 base = self.introfilectx()
987 if getattr(base, '_ancestrycontext', None) is None:
928 if getattr(base, '_ancestrycontext', None) is None:
988 cl = self._repo.changelog
929 cl = self._repo.changelog
989 if base.rev() is None:
930 if base.rev() is None:
990 # wctx is not inclusive, but works because _ancestrycontext
931 # wctx is not inclusive, but works because _ancestrycontext
991 # is used to test filelog revisions
932 # is used to test filelog revisions
992 ac = cl.ancestors([p.rev() for p in base.parents()],
933 ac = cl.ancestors([p.rev() for p in base.parents()],
993 inclusive=True)
934 inclusive=True)
994 else:
935 else:
995 ac = cl.ancestors([base.rev()], inclusive=True)
936 ac = cl.ancestors([base.rev()], inclusive=True)
996 base._ancestrycontext = ac
937 base._ancestrycontext = ac
997
938
998 return dagop.annotate(base, parents, skiprevs=skiprevs,
939 return dagop.annotate(base, parents, skiprevs=skiprevs,
999 diffopts=diffopts)
940 diffopts=diffopts)
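# Illustrative usage sketch, not part of context.py: one way annotate()
# results might be consumed from a filectx.  The repository path '.' and the
# tracked file b'README' are hypothetical assumptions; error handling is
# omitted.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
fctx = repo[b'.'][b'README']
for line in fctx.annotate(follow=True):
    # line.fctx is the filectx of the changeset that last touched this line
    repo.ui.write(b'%d:%d: %s' % (line.fctx.rev(), line.lineno, line.text))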
1000
941
1001 def ancestors(self, followfirst=False):
942 def ancestors(self, followfirst=False):
1002 visit = {}
943 visit = {}
1003 c = self
944 c = self
1004 if followfirst:
945 if followfirst:
1005 cut = 1
946 cut = 1
1006 else:
947 else:
1007 cut = None
948 cut = None
1008
949
1009 while True:
950 while True:
1010 for parent in c.parents()[:cut]:
951 for parent in c.parents()[:cut]:
1011 visit[(parent.linkrev(), parent.filenode())] = parent
952 visit[(parent.linkrev(), parent.filenode())] = parent
1012 if not visit:
953 if not visit:
1013 break
954 break
1014 c = visit.pop(max(visit))
955 c = visit.pop(max(visit))
1015 yield c
956 yield c
1016
957
1017 def decodeddata(self):
958 def decodeddata(self):
1018 """Returns `data()` after running repository decoding filters.
959 """Returns `data()` after running repository decoding filters.
1019
960
1020 This is often equivalent to how the data would be expressed on disk.
961 This is often equivalent to how the data would be expressed on disk.
1021 """
962 """
1022 return self._repo.wwritedata(self.path(), self.data())
963 return self._repo.wwritedata(self.path(), self.data())
1023
964
1024 class filectx(basefilectx):
965 class filectx(basefilectx):
1025 """A filecontext object makes access to data related to a particular
966 """A filecontext object makes access to data related to a particular
1026 filerevision convenient."""
967 filerevision convenient."""
1027 def __init__(self, repo, path, changeid=None, fileid=None,
968 def __init__(self, repo, path, changeid=None, fileid=None,
1028 filelog=None, changectx=None):
969 filelog=None, changectx=None):
1029 """changeid can be a changeset revision, node, or tag.
970 """changeid can be a changeset revision, node, or tag.
1030 fileid can be a file revision or node."""
971 fileid can be a file revision or node."""
1031 self._repo = repo
972 self._repo = repo
1032 self._path = path
973 self._path = path
1033
974
1034 assert (changeid is not None
975 assert (changeid is not None
1035 or fileid is not None
976 or fileid is not None
1036 or changectx is not None), \
977 or changectx is not None), \
1037 ("bad args: changeid=%r, fileid=%r, changectx=%r"
978 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1038 % (changeid, fileid, changectx))
979 % (changeid, fileid, changectx))
1039
980
1040 if filelog is not None:
981 if filelog is not None:
1041 self._filelog = filelog
982 self._filelog = filelog
1042
983
1043 if changeid is not None:
984 if changeid is not None:
1044 self._changeid = changeid
985 self._changeid = changeid
1045 if changectx is not None:
986 if changectx is not None:
1046 self._changectx = changectx
987 self._changectx = changectx
1047 if fileid is not None:
988 if fileid is not None:
1048 self._fileid = fileid
989 self._fileid = fileid
1049
990
1050 @propertycache
991 @propertycache
1051 def _changectx(self):
992 def _changectx(self):
1052 try:
993 try:
1053 return changectx(self._repo, self._changeid)
994 return changectx(self._repo, self._changeid)
1054 except error.FilteredRepoLookupError:
995 except error.FilteredRepoLookupError:
1055 # Linkrev may point to any revision in the repository. When the
996 # Linkrev may point to any revision in the repository. When the
1056 # repository is filtered this may lead to `filectx` trying to build
997 # repository is filtered this may lead to `filectx` trying to build
1057 # `changectx` for a filtered revision. In such a case we fall back to
998 # `changectx` for a filtered revision. In such a case we fall back to
1058 # creating a `changectx` on the unfiltered version of the repository.
999 # creating a `changectx` on the unfiltered version of the repository.
1059 # This fallback should not be an issue because `changectx` objects from
1000 # This fallback should not be an issue because `changectx` objects from
1060 # `filectx` are not used in complex operations that care about
1001 # `filectx` are not used in complex operations that care about
1061 # filtering.
1002 # filtering.
1062 #
1003 #
1063 # This fallback is a cheap and dirty fix that prevents several
1004 # This fallback is a cheap and dirty fix that prevents several
1064 # crashes. It does not ensure the behavior is correct. However the
1005 # crashes. It does not ensure the behavior is correct. However the
1065 # behavior was not correct before filtering either and "incorrect
1006 # behavior was not correct before filtering either and "incorrect
1066 # behavior" is seen as better than "crash".
1007 # behavior" is seen as better than "crash".
1067 #
1008 #
1068 # Linkrevs have several serious problems with filtering that are
1009 # Linkrevs have several serious problems with filtering that are
1069 # complicated to solve. Proper handling of the issue here should be
1010 # complicated to solve. Proper handling of the issue here should be
1070 # considered when fixing the linkrev issues is on the table.
1011 # considered when fixing the linkrev issues is on the table.
1071 return changectx(self._repo.unfiltered(), self._changeid)
1012 return changectx(self._repo.unfiltered(), self._changeid)
1072
1013
1073 def filectx(self, fileid, changeid=None):
1014 def filectx(self, fileid, changeid=None):
1074 '''opens an arbitrary revision of the file without
1015 '''opens an arbitrary revision of the file without
1075 opening a new filelog'''
1016 opening a new filelog'''
1076 return filectx(self._repo, self._path, fileid=fileid,
1017 return filectx(self._repo, self._path, fileid=fileid,
1077 filelog=self._filelog, changeid=changeid)
1018 filelog=self._filelog, changeid=changeid)
1078
1019
1079 def rawdata(self):
1020 def rawdata(self):
1080 return self._filelog.revision(self._filenode, raw=True)
1021 return self._filelog.revision(self._filenode, raw=True)
1081
1022
1082 def rawflags(self):
1023 def rawflags(self):
1083 """low-level revlog flags"""
1024 """low-level revlog flags"""
1084 return self._filelog.flags(self._filerev)
1025 return self._filelog.flags(self._filerev)
1085
1026
1086 def data(self):
1027 def data(self):
1087 try:
1028 try:
1088 return self._filelog.read(self._filenode)
1029 return self._filelog.read(self._filenode)
1089 except error.CensoredNodeError:
1030 except error.CensoredNodeError:
1090 if self._repo.ui.config("censor", "policy") == "ignore":
1031 if self._repo.ui.config("censor", "policy") == "ignore":
1091 return ""
1032 return ""
1092 raise error.Abort(_("censored node: %s") % short(self._filenode),
1033 raise error.Abort(_("censored node: %s") % short(self._filenode),
1093 hint=_("set censor.policy to ignore errors"))
1034 hint=_("set censor.policy to ignore errors"))
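# The censor.policy knob referenced in the hint above is an ordinary
# configuration option; a sketch of the hgrc stanza that makes data() return
# an empty string for censored file revisions instead of aborting:
#
#   [censor]
#   policy = ignore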
1094
1035
1095 def size(self):
1036 def size(self):
1096 return self._filelog.size(self._filerev)
1037 return self._filelog.size(self._filerev)
1097
1038
1098 @propertycache
1039 @propertycache
1099 def _copied(self):
1040 def _copied(self):
1100 """check if file was actually renamed in this changeset revision
1041 """check if file was actually renamed in this changeset revision
1101
1042
1102 If a rename is logged in the file revision, we report a copy for the
1043 If a rename is logged in the file revision, we report a copy for the
1103 changeset only if the file revision's linkrev points back to the changeset
1044 changeset only if the file revision's linkrev points back to the changeset
1104 in question or both changeset parents contain different file revisions.
1045 in question or both changeset parents contain different file revisions.
1105 """
1046 """
1106
1047
1107 renamed = self._filelog.renamed(self._filenode)
1048 renamed = self._filelog.renamed(self._filenode)
1108 if not renamed:
1049 if not renamed:
1109 return renamed
1050 return renamed
1110
1051
1111 if self.rev() == self.linkrev():
1052 if self.rev() == self.linkrev():
1112 return renamed
1053 return renamed
1113
1054
1114 name = self.path()
1055 name = self.path()
1115 fnode = self._filenode
1056 fnode = self._filenode
1116 for p in self._changectx.parents():
1057 for p in self._changectx.parents():
1117 try:
1058 try:
1118 if fnode == p.filenode(name):
1059 if fnode == p.filenode(name):
1119 return None
1060 return None
1120 except error.LookupError:
1061 except error.LookupError:
1121 pass
1062 pass
1122 return renamed
1063 return renamed
1123
1064
1124 def children(self):
1065 def children(self):
1125 # hard for renames
1066 # hard for renames
1126 c = self._filelog.children(self._filenode)
1067 c = self._filelog.children(self._filenode)
1127 return [filectx(self._repo, self._path, fileid=x,
1068 return [filectx(self._repo, self._path, fileid=x,
1128 filelog=self._filelog) for x in c]
1069 filelog=self._filelog) for x in c]
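# Illustrative sketch, not part of context.py: two ways a caller might obtain
# a filectx from the class above for the same file revision.  The repository
# path '.', the revision number 0 and the file b'setup.py' are hypothetical
# assumptions.
from mercurial import context, hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
fctx1 = repo[0][b'setup.py']                            # via a changectx
fctx2 = context.filectx(repo, b'setup.py', changeid=0)  # direct construction
assert fctx1.filenode() == fctx2.filenode()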
1129
1070
1130 class committablectx(basectx):
1071 class committablectx(basectx):
1131 """A committablectx object provides common functionality for a context that
1072 """A committablectx object provides common functionality for a context that
1132 wants the ability to commit, e.g. workingctx or memctx."""
1073 wants the ability to commit, e.g. workingctx or memctx."""
1133 def __init__(self, repo, text="", user=None, date=None, extra=None,
1074 def __init__(self, repo, text="", user=None, date=None, extra=None,
1134 changes=None):
1075 changes=None):
1135 super(committablectx, self).__init__(repo)
1076 super(committablectx, self).__init__(repo)
1136 self._rev = None
1077 self._rev = None
1137 self._node = None
1078 self._node = None
1138 self._text = text
1079 self._text = text
1139 if date:
1080 if date:
1140 self._date = dateutil.parsedate(date)
1081 self._date = dateutil.parsedate(date)
1141 if user:
1082 if user:
1142 self._user = user
1083 self._user = user
1143 if changes:
1084 if changes:
1144 self._status = changes
1085 self._status = changes
1145
1086
1146 self._extra = {}
1087 self._extra = {}
1147 if extra:
1088 if extra:
1148 self._extra = extra.copy()
1089 self._extra = extra.copy()
1149 if 'branch' not in self._extra:
1090 if 'branch' not in self._extra:
1150 try:
1091 try:
1151 branch = encoding.fromlocal(self._repo.dirstate.branch())
1092 branch = encoding.fromlocal(self._repo.dirstate.branch())
1152 except UnicodeDecodeError:
1093 except UnicodeDecodeError:
1153 raise error.Abort(_('branch name not in UTF-8!'))
1094 raise error.Abort(_('branch name not in UTF-8!'))
1154 self._extra['branch'] = branch
1095 self._extra['branch'] = branch
1155 if self._extra['branch'] == '':
1096 if self._extra['branch'] == '':
1156 self._extra['branch'] = 'default'
1097 self._extra['branch'] = 'default'
1157
1098
1158 def __bytes__(self):
1099 def __bytes__(self):
1159 return bytes(self._parents[0]) + "+"
1100 return bytes(self._parents[0]) + "+"
1160
1101
1161 __str__ = encoding.strmethod(__bytes__)
1102 __str__ = encoding.strmethod(__bytes__)
1162
1103
1163 def __nonzero__(self):
1104 def __nonzero__(self):
1164 return True
1105 return True
1165
1106
1166 __bool__ = __nonzero__
1107 __bool__ = __nonzero__
1167
1108
1168 def _buildflagfunc(self):
1109 def _buildflagfunc(self):
1169 # Create a fallback function for getting file flags when the
1110 # Create a fallback function for getting file flags when the
1170 # filesystem doesn't support them
1111 # filesystem doesn't support them
1171
1112
1172 copiesget = self._repo.dirstate.copies().get
1113 copiesget = self._repo.dirstate.copies().get
1173 parents = self.parents()
1114 parents = self.parents()
1174 if len(parents) < 2:
1115 if len(parents) < 2:
1175 # when we have one parent, it's easy: copy from parent
1116 # when we have one parent, it's easy: copy from parent
1176 man = parents[0].manifest()
1117 man = parents[0].manifest()
1177 def func(f):
1118 def func(f):
1178 f = copiesget(f, f)
1119 f = copiesget(f, f)
1179 return man.flags(f)
1120 return man.flags(f)
1180 else:
1121 else:
1181 # merges are tricky: we try to reconstruct the unstored
1122 # merges are tricky: we try to reconstruct the unstored
1182 # result from the merge (issue1802)
1123 # result from the merge (issue1802)
1183 p1, p2 = parents
1124 p1, p2 = parents
1184 pa = p1.ancestor(p2)
1125 pa = p1.ancestor(p2)
1185 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1126 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1186
1127
1187 def func(f):
1128 def func(f):
1188 f = copiesget(f, f) # may be wrong for merges with copies
1129 f = copiesget(f, f) # may be wrong for merges with copies
1189 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1130 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1190 if fl1 == fl2:
1131 if fl1 == fl2:
1191 return fl1
1132 return fl1
1192 if fl1 == fla:
1133 if fl1 == fla:
1193 return fl2
1134 return fl2
1194 if fl2 == fla:
1135 if fl2 == fla:
1195 return fl1
1136 return fl1
1196 return '' # punt for conflicts
1137 return '' # punt for conflicts
1197
1138
1198 return func
1139 return func
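# Worked example of the three-way flag resolution above (illustrative note,
# not in the original source): if p1 marks a file executable (fl1 = 'x')
# while p2 and the ancestor leave it plain (fl2 = fla = ''), then fl2 == fla
# and the p1 value 'x' wins.  If all three disagree, e.g. 'x', 'l' and '',
# none of the equalities hold and the function punts with ''.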
1199
1140
1200 @propertycache
1141 @propertycache
1201 def _flagfunc(self):
1142 def _flagfunc(self):
1202 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1143 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1203
1144
1204 @propertycache
1145 @propertycache
1205 def _status(self):
1146 def _status(self):
1206 return self._repo.status()
1147 return self._repo.status()
1207
1148
1208 @propertycache
1149 @propertycache
1209 def _user(self):
1150 def _user(self):
1210 return self._repo.ui.username()
1151 return self._repo.ui.username()
1211
1152
1212 @propertycache
1153 @propertycache
1213 def _date(self):
1154 def _date(self):
1214 ui = self._repo.ui
1155 ui = self._repo.ui
1215 date = ui.configdate('devel', 'default-date')
1156 date = ui.configdate('devel', 'default-date')
1216 if date is None:
1157 if date is None:
1217 date = dateutil.makedate()
1158 date = dateutil.makedate()
1218 return date
1159 return date
1219
1160
1220 def subrev(self, subpath):
1161 def subrev(self, subpath):
1221 return None
1162 return None
1222
1163
1223 def manifestnode(self):
1164 def manifestnode(self):
1224 return None
1165 return None
1225 def user(self):
1166 def user(self):
1226 return self._user or self._repo.ui.username()
1167 return self._user or self._repo.ui.username()
1227 def date(self):
1168 def date(self):
1228 return self._date
1169 return self._date
1229 def description(self):
1170 def description(self):
1230 return self._text
1171 return self._text
1231 def files(self):
1172 def files(self):
1232 return sorted(self._status.modified + self._status.added +
1173 return sorted(self._status.modified + self._status.added +
1233 self._status.removed)
1174 self._status.removed)
1234
1175
1235 def modified(self):
1176 def modified(self):
1236 return self._status.modified
1177 return self._status.modified
1237 def added(self):
1178 def added(self):
1238 return self._status.added
1179 return self._status.added
1239 def removed(self):
1180 def removed(self):
1240 return self._status.removed
1181 return self._status.removed
1241 def deleted(self):
1182 def deleted(self):
1242 return self._status.deleted
1183 return self._status.deleted
1243 def branch(self):
1184 def branch(self):
1244 return encoding.tolocal(self._extra['branch'])
1185 return encoding.tolocal(self._extra['branch'])
1245 def closesbranch(self):
1186 def closesbranch(self):
1246 return 'close' in self._extra
1187 return 'close' in self._extra
1247 def extra(self):
1188 def extra(self):
1248 return self._extra
1189 return self._extra
1249
1190
1250 def isinmemory(self):
1191 def isinmemory(self):
1251 return False
1192 return False
1252
1193
1253 def tags(self):
1194 def tags(self):
1254 return []
1195 return []
1255
1196
1256 def bookmarks(self):
1197 def bookmarks(self):
1257 b = []
1198 b = []
1258 for p in self.parents():
1199 for p in self.parents():
1259 b.extend(p.bookmarks())
1200 b.extend(p.bookmarks())
1260 return b
1201 return b
1261
1202
1262 def phase(self):
1203 def phase(self):
1263 phase = phases.draft # default phase to draft
1204 phase = phases.draft # default phase to draft
1264 for p in self.parents():
1205 for p in self.parents():
1265 phase = max(phase, p.phase())
1206 phase = max(phase, p.phase())
1266 return phase
1207 return phase
1267
1208
1268 def hidden(self):
1209 def hidden(self):
1269 return False
1210 return False
1270
1211
1271 def children(self):
1212 def children(self):
1272 return []
1213 return []
1273
1214
1274 def flags(self, path):
1215 def flags(self, path):
1275 if r'_manifest' in self.__dict__:
1216 if r'_manifest' in self.__dict__:
1276 try:
1217 try:
1277 return self._manifest.flags(path)
1218 return self._manifest.flags(path)
1278 except KeyError:
1219 except KeyError:
1279 return ''
1220 return ''
1280
1221
1281 try:
1222 try:
1282 return self._flagfunc(path)
1223 return self._flagfunc(path)
1283 except OSError:
1224 except OSError:
1284 return ''
1225 return ''
1285
1226
1286 def ancestor(self, c2):
1227 def ancestor(self, c2):
1287 """return the "best" ancestor context of self and c2"""
1228 """return the "best" ancestor context of self and c2"""
1288 return self._parents[0].ancestor(c2) # punt on two parents for now
1229 return self._parents[0].ancestor(c2) # punt on two parents for now
1289
1230
1290 def walk(self, match):
1231 def walk(self, match):
1291 '''Generates matching file names.'''
1232 '''Generates matching file names.'''
1292 return sorted(self._repo.dirstate.walk(match,
1233 return sorted(self._repo.dirstate.walk(match,
1293 subrepos=sorted(self.substate),
1234 subrepos=sorted(self.substate),
1294 unknown=True, ignored=False))
1235 unknown=True, ignored=False))
1295
1236
1296 def matches(self, match):
1237 def matches(self, match):
1297 return sorted(self._repo.dirstate.matches(match))
1238 return sorted(self._repo.dirstate.matches(match))
1298
1239
1299 def ancestors(self):
1240 def ancestors(self):
1300 for p in self._parents:
1241 for p in self._parents:
1301 yield p
1242 yield p
1302 for a in self._repo.changelog.ancestors(
1243 for a in self._repo.changelog.ancestors(
1303 [p.rev() for p in self._parents]):
1244 [p.rev() for p in self._parents]):
1304 yield changectx(self._repo, a)
1245 yield changectx(self._repo, a)
1305
1246
1306 def markcommitted(self, node):
1247 def markcommitted(self, node):
1307 """Perform post-commit cleanup necessary after committing this ctx
1248 """Perform post-commit cleanup necessary after committing this ctx
1308
1249
1309 Specifically, this updates the backing stores that this working context
1250 Specifically, this updates the backing stores that this working context
1310 wraps to reflect the fact that the changes represented by this
1251 wraps to reflect the fact that the changes represented by this
1311 workingctx have been committed. For example, it marks
1252 workingctx have been committed. For example, it marks
1312 modified and added files as normal in the dirstate.
1253 modified and added files as normal in the dirstate.
1313
1254
1314 """
1255 """
1315
1256
1316 with self._repo.dirstate.parentchange():
1257 with self._repo.dirstate.parentchange():
1317 for f in self.modified() + self.added():
1258 for f in self.modified() + self.added():
1318 self._repo.dirstate.normal(f)
1259 self._repo.dirstate.normal(f)
1319 for f in self.removed():
1260 for f in self.removed():
1320 self._repo.dirstate.drop(f)
1261 self._repo.dirstate.drop(f)
1321 self._repo.dirstate.setparents(node)
1262 self._repo.dirstate.setparents(node)
1322
1263
1323 # write changes out explicitly, because nesting wlock at
1264 # write changes out explicitly, because nesting wlock at
1324 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1265 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1325 # from immediately doing so for subsequent changing files
1266 # from immediately doing so for subsequent changing files
1326 self._repo.dirstate.write(self._repo.currenttransaction())
1267 self._repo.dirstate.write(self._repo.currenttransaction())
1327
1268
1328 def dirty(self, missing=False, merge=True, branch=True):
1269 def dirty(self, missing=False, merge=True, branch=True):
1329 return False
1270 return False
1330
1271
1331 class workingctx(committablectx):
1272 class workingctx(committablectx):
1332 """A workingctx object makes access to data related to
1273 """A workingctx object makes access to data related to
1333 the current working directory convenient.
1274 the current working directory convenient.
1334 date - any valid date string or (unixtime, offset), or None.
1275 date - any valid date string or (unixtime, offset), or None.
1335 user - username string, or None.
1276 user - username string, or None.
1336 extra - a dictionary of extra values, or None.
1277 extra - a dictionary of extra values, or None.
1337 changes - a list of file lists as returned by localrepo.status()
1278 changes - a list of file lists as returned by localrepo.status()
1338 or None to use the repository status.
1279 or None to use the repository status.
1339 """
1280 """
1340 def __init__(self, repo, text="", user=None, date=None, extra=None,
1281 def __init__(self, repo, text="", user=None, date=None, extra=None,
1341 changes=None):
1282 changes=None):
1342 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1283 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1343
1284
1344 def __iter__(self):
1285 def __iter__(self):
1345 d = self._repo.dirstate
1286 d = self._repo.dirstate
1346 for f in d:
1287 for f in d:
1347 if d[f] != 'r':
1288 if d[f] != 'r':
1348 yield f
1289 yield f
1349
1290
1350 def __contains__(self, key):
1291 def __contains__(self, key):
1351 return self._repo.dirstate[key] not in "?r"
1292 return self._repo.dirstate[key] not in "?r"
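# Illustrative note, not in the original source: the single-letter dirstate
# states used here and below are 'n' (normal/clean), 'a' (added),
# 'r' (removed), 'm' (merged) and '?' (untracked), so __iter__ yields every
# tracked file except those marked removed, and __contains__ is True for any
# file currently tracked in the working directory.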
1352
1293
1353 def hex(self):
1294 def hex(self):
1354 return hex(wdirid)
1295 return hex(wdirid)
1355
1296
1356 @propertycache
1297 @propertycache
1357 def _parents(self):
1298 def _parents(self):
1358 p = self._repo.dirstate.parents()
1299 p = self._repo.dirstate.parents()
1359 if p[1] == nullid:
1300 if p[1] == nullid:
1360 p = p[:-1]
1301 p = p[:-1]
1361 return [changectx(self._repo, x) for x in p]
1302 return [changectx(self._repo, x) for x in p]
1362
1303
1363 def _fileinfo(self, path):
1304 def _fileinfo(self, path):
1364 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1305 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1365 self._manifest
1306 self._manifest
1366 return super(workingctx, self)._fileinfo(path)
1307 return super(workingctx, self)._fileinfo(path)
1367
1308
1368 def filectx(self, path, filelog=None):
1309 def filectx(self, path, filelog=None):
1369 """get a file context from the working directory"""
1310 """get a file context from the working directory"""
1370 return workingfilectx(self._repo, path, workingctx=self,
1311 return workingfilectx(self._repo, path, workingctx=self,
1371 filelog=filelog)
1312 filelog=filelog)
1372
1313
1373 def dirty(self, missing=False, merge=True, branch=True):
1314 def dirty(self, missing=False, merge=True, branch=True):
1374 "check whether a working directory is modified"
1315 "check whether a working directory is modified"
1375 # check subrepos first
1316 # check subrepos first
1376 for s in sorted(self.substate):
1317 for s in sorted(self.substate):
1377 if self.sub(s).dirty(missing=missing):
1318 if self.sub(s).dirty(missing=missing):
1378 return True
1319 return True
1379 # check current working dir
1320 # check current working dir
1380 return ((merge and self.p2()) or
1321 return ((merge and self.p2()) or
1381 (branch and self.branch() != self.p1().branch()) or
1322 (branch and self.branch() != self.p1().branch()) or
1382 self.modified() or self.added() or self.removed() or
1323 self.modified() or self.added() or self.removed() or
1383 (missing and self.deleted()))
1324 (missing and self.deleted()))
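# Illustrative usage sketch, not part of context.py: a typical caller-side
# guard built on dirty().  Assumes a repository in the current directory;
# the abort message is hypothetical.
from mercurial import error, hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
wctx = repo[None]                       # the workingctx
if wctx.dirty(missing=True):
    raise error.Abort(b'uncommitted changes in the working directory')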
1384
1325
1385 def add(self, list, prefix=""):
1326 def add(self, list, prefix=""):
1386 with self._repo.wlock():
1327 with self._repo.wlock():
1387 ui, ds = self._repo.ui, self._repo.dirstate
1328 ui, ds = self._repo.ui, self._repo.dirstate
1388 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1329 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1389 rejected = []
1330 rejected = []
1390 lstat = self._repo.wvfs.lstat
1331 lstat = self._repo.wvfs.lstat
1391 for f in list:
1332 for f in list:
1392 # ds.pathto() returns an absolute file when this is invoked from
1333 # ds.pathto() returns an absolute file when this is invoked from
1393 # the keyword extension. That gets flagged as non-portable on
1334 # the keyword extension. That gets flagged as non-portable on
1394 # Windows, since it contains the drive letter and colon.
1335 # Windows, since it contains the drive letter and colon.
1395 scmutil.checkportable(ui, os.path.join(prefix, f))
1336 scmutil.checkportable(ui, os.path.join(prefix, f))
1396 try:
1337 try:
1397 st = lstat(f)
1338 st = lstat(f)
1398 except OSError:
1339 except OSError:
1399 ui.warn(_("%s does not exist!\n") % uipath(f))
1340 ui.warn(_("%s does not exist!\n") % uipath(f))
1400 rejected.append(f)
1341 rejected.append(f)
1401 continue
1342 continue
1402 if st.st_size > 10000000:
1343 if st.st_size > 10000000:
1403 ui.warn(_("%s: up to %d MB of RAM may be required "
1344 ui.warn(_("%s: up to %d MB of RAM may be required "
1404 "to manage this file\n"
1345 "to manage this file\n"
1405 "(use 'hg revert %s' to cancel the "
1346 "(use 'hg revert %s' to cancel the "
1406 "pending addition)\n")
1347 "pending addition)\n")
1407 % (f, 3 * st.st_size // 1000000, uipath(f)))
1348 % (f, 3 * st.st_size // 1000000, uipath(f)))
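# Worked example of the estimate above (illustrative): adding a file of
# 50,000,000 bytes exceeds the 10,000,000 byte threshold, so the warning
# reports 3 * 50000000 // 1000000 = 150 MB of potential RAM usage.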
1408 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1349 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1409 ui.warn(_("%s not added: only files and symlinks "
1350 ui.warn(_("%s not added: only files and symlinks "
1410 "supported currently\n") % uipath(f))
1351 "supported currently\n") % uipath(f))
1411 rejected.append(f)
1352 rejected.append(f)
1412 elif ds[f] in 'amn':
1353 elif ds[f] in 'amn':
1413 ui.warn(_("%s already tracked!\n") % uipath(f))
1354 ui.warn(_("%s already tracked!\n") % uipath(f))
1414 elif ds[f] == 'r':
1355 elif ds[f] == 'r':
1415 ds.normallookup(f)
1356 ds.normallookup(f)
1416 else:
1357 else:
1417 ds.add(f)
1358 ds.add(f)
1418 return rejected
1359 return rejected
1419
1360
1420 def forget(self, files, prefix=""):
1361 def forget(self, files, prefix=""):
1421 with self._repo.wlock():
1362 with self._repo.wlock():
1422 ds = self._repo.dirstate
1363 ds = self._repo.dirstate
1423 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1364 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1424 rejected = []
1365 rejected = []
1425 for f in files:
1366 for f in files:
1426 if f not in self._repo.dirstate:
1367 if f not in self._repo.dirstate:
1427 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1368 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1428 rejected.append(f)
1369 rejected.append(f)
1429 elif self._repo.dirstate[f] != 'a':
1370 elif self._repo.dirstate[f] != 'a':
1430 self._repo.dirstate.remove(f)
1371 self._repo.dirstate.remove(f)
1431 else:
1372 else:
1432 self._repo.dirstate.drop(f)
1373 self._repo.dirstate.drop(f)
1433 return rejected
1374 return rejected
1434
1375
1435 def undelete(self, list):
1376 def undelete(self, list):
1436 pctxs = self.parents()
1377 pctxs = self.parents()
1437 with self._repo.wlock():
1378 with self._repo.wlock():
1438 ds = self._repo.dirstate
1379 ds = self._repo.dirstate
1439 for f in list:
1380 for f in list:
1440 if self._repo.dirstate[f] != 'r':
1381 if self._repo.dirstate[f] != 'r':
1441 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1382 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1442 else:
1383 else:
1443 fctx = pctxs[0][f] if f in pctxs[0] else pctxs[1][f]
1384 fctx = pctxs[0][f] if f in pctxs[0] else pctxs[1][f]
1444 t = fctx.data()
1385 t = fctx.data()
1445 self._repo.wwrite(f, t, fctx.flags())
1386 self._repo.wwrite(f, t, fctx.flags())
1446 self._repo.dirstate.normal(f)
1387 self._repo.dirstate.normal(f)
1447
1388
1448 def copy(self, source, dest):
1389 def copy(self, source, dest):
1449 try:
1390 try:
1450 st = self._repo.wvfs.lstat(dest)
1391 st = self._repo.wvfs.lstat(dest)
1451 except OSError as err:
1392 except OSError as err:
1452 if err.errno != errno.ENOENT:
1393 if err.errno != errno.ENOENT:
1453 raise
1394 raise
1454 self._repo.ui.warn(_("%s does not exist!\n")
1395 self._repo.ui.warn(_("%s does not exist!\n")
1455 % self._repo.dirstate.pathto(dest))
1396 % self._repo.dirstate.pathto(dest))
1456 return
1397 return
1457 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1398 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1458 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1399 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1459 "symbolic link\n")
1400 "symbolic link\n")
1460 % self._repo.dirstate.pathto(dest))
1401 % self._repo.dirstate.pathto(dest))
1461 else:
1402 else:
1462 with self._repo.wlock():
1403 with self._repo.wlock():
1463 if self._repo.dirstate[dest] in '?':
1404 if self._repo.dirstate[dest] in '?':
1464 self._repo.dirstate.add(dest)
1405 self._repo.dirstate.add(dest)
1465 elif self._repo.dirstate[dest] in 'r':
1406 elif self._repo.dirstate[dest] in 'r':
1466 self._repo.dirstate.normallookup(dest)
1407 self._repo.dirstate.normallookup(dest)
1467 self._repo.dirstate.copy(source, dest)
1408 self._repo.dirstate.copy(source, dest)
1468
1409
1469 def match(self, pats=None, include=None, exclude=None, default='glob',
1410 def match(self, pats=None, include=None, exclude=None, default='glob',
1470 listsubrepos=False, badfn=None):
1411 listsubrepos=False, badfn=None):
1471 r = self._repo
1412 r = self._repo
1472
1413
1473 # Only a case insensitive filesystem needs magic to translate user input
1414 # Only a case insensitive filesystem needs magic to translate user input
1474 # to actual case in the filesystem.
1415 # to actual case in the filesystem.
1475 icasefs = not util.fscasesensitive(r.root)
1416 icasefs = not util.fscasesensitive(r.root)
1476 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1417 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1477 default, auditor=r.auditor, ctx=self,
1418 default, auditor=r.auditor, ctx=self,
1478 listsubrepos=listsubrepos, badfn=badfn,
1419 listsubrepos=listsubrepos, badfn=badfn,
1479 icasefs=icasefs)
1420 icasefs=icasefs)
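# Illustrative usage sketch, not part of context.py: building a matcher
# through the working context and walking the matching files.  The glob
# pattern is a hypothetical example; assumes a repository in the current
# directory.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui.load(), b'.')
wctx = repo[None]
m = wctx.match([b'glob:**.py'])
for f in wctx.walk(m):                  # sorted file names from the dirstate
    repo.ui.write(b'%s\n' % f)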
1480
1421
1481 def _filtersuspectsymlink(self, files):
1422 def _filtersuspectsymlink(self, files):
1482 if not files or self._repo.dirstate._checklink:
1423 if not files or self._repo.dirstate._checklink:
1483 return files
1424 return files
1484
1425
1485 # Symlink placeholders may get non-symlink-like contents
1426 # Symlink placeholders may get non-symlink-like contents
1486 # via user error or dereferencing by NFS or Samba servers,
1427 # via user error or dereferencing by NFS or Samba servers,
1487 # so we filter out any placeholders that don't look like a
1428 # so we filter out any placeholders that don't look like a
1488 # symlink
1429 # symlink
1489 sane = []
1430 sane = []
1490 for f in files:
1431 for f in files:
1491 if self.flags(f) == 'l':
1432 if self.flags(f) == 'l':
1492 d = self[f].data()
1433 d = self[f].data()
1493 if (d == '' or len(d) >= 1024 or '\n' in d
1434 if (d == '' or len(d) >= 1024 or '\n' in d
1494 or stringutil.binary(d)):
1435 or stringutil.binary(d)):
1495 self._repo.ui.debug('ignoring suspect symlink placeholder'
1436 self._repo.ui.debug('ignoring suspect symlink placeholder'
1496 ' "%s"\n' % f)
1437 ' "%s"\n' % f)
1497 continue
1438 continue
1498 sane.append(f)
1439 sane.append(f)
1499 return sane
1440 return sane
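# Illustrative note, not in the original source: a sane symlink placeholder
# is a short, single-line, non-binary target path such as b'../real/target';
# a placeholder whose content is empty, 1024 bytes or longer, spans several
# lines, or looks binary is assumed to have been clobbered (for example by an
# NFS or Samba client materialising the link target) and is filtered out of
# the modified list above.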
1500
1441
1501 def _checklookup(self, files):
1442 def _checklookup(self, files):
1502 # check for any possibly clean files
1443 # check for any possibly clean files
1503 if not files:
1444 if not files:
1504 return [], [], []
1445 return [], [], []
1505
1446
1506 modified = []
1447 modified = []
1507 deleted = []
1448 deleted = []
1508 fixup = []
1449 fixup = []
1509 pctx = self._parents[0]
1450 pctx = self._parents[0]
1510 # do a full compare of any files that might have changed
1451 # do a full compare of any files that might have changed
1511 for f in sorted(files):
1452 for f in sorted(files):
1512 try:
1453 try:
1513 # This will return True for a file that got replaced by a
1454 # This will return True for a file that got replaced by a
1514 # directory in the interim, but fixing that is pretty hard.
1455 # directory in the interim, but fixing that is pretty hard.
1515 if (f not in pctx or self.flags(f) != pctx.flags(f)
1456 if (f not in pctx or self.flags(f) != pctx.flags(f)
1516 or pctx[f].cmp(self[f])):
1457 or pctx[f].cmp(self[f])):
1517 modified.append(f)
1458 modified.append(f)
1518 else:
1459 else:
1519 fixup.append(f)
1460 fixup.append(f)
1520 except (IOError, OSError):
1461 except (IOError, OSError):
1521 # A file became inaccessible in between? Mark it as deleted,
1462 # A file became inaccessible in between? Mark it as deleted,
1522 # matching dirstate behavior (issue5584).
1463 # matching dirstate behavior (issue5584).
1523 # The dirstate has more complex behavior around whether a
1464 # The dirstate has more complex behavior around whether a
1524 # missing file matches a directory, etc, but we don't need to
1465 # missing file matches a directory, etc, but we don't need to
1525 # bother with that: if f has made it to this point, we're sure
1466 # bother with that: if f has made it to this point, we're sure
1526 # it's in the dirstate.
1467 # it's in the dirstate.
1527 deleted.append(f)
1468 deleted.append(f)
1528
1469
1529 return modified, deleted, fixup
1470 return modified, deleted, fixup
1530
1471
1531 def _poststatusfixup(self, status, fixup):
1472 def _poststatusfixup(self, status, fixup):
1532 """update dirstate for files that are actually clean"""
1473 """update dirstate for files that are actually clean"""
1533 poststatus = self._repo.postdsstatus()
1474 poststatus = self._repo.postdsstatus()
1534 if fixup or poststatus:
1475 if fixup or poststatus:
1535 try:
1476 try:
1536 oldid = self._repo.dirstate.identity()
1477 oldid = self._repo.dirstate.identity()
1537
1478
1538 # updating the dirstate is optional
1479 # updating the dirstate is optional
1539 # so we don't wait on the lock
1480 # so we don't wait on the lock
1540 # wlock can invalidate the dirstate, so cache normal _after_
1481 # wlock can invalidate the dirstate, so cache normal _after_
1541 # taking the lock
1482 # taking the lock
1542 with self._repo.wlock(False):
1483 with self._repo.wlock(False):
1543 if self._repo.dirstate.identity() == oldid:
1484 if self._repo.dirstate.identity() == oldid:
1544 if fixup:
1485 if fixup:
1545 normal = self._repo.dirstate.normal
1486 normal = self._repo.dirstate.normal
1546 for f in fixup:
1487 for f in fixup:
1547 normal(f)
1488 normal(f)
1548 # write changes out explicitly, because nesting
1489 # write changes out explicitly, because nesting
1549 # wlock at runtime may prevent 'wlock.release()'
1490 # wlock at runtime may prevent 'wlock.release()'
1550 # after this block from doing so for subsequent
1491 # after this block from doing so for subsequent
1551 # changing files
1492 # changing files
1552 tr = self._repo.currenttransaction()
1493 tr = self._repo.currenttransaction()
1553 self._repo.dirstate.write(tr)
1494 self._repo.dirstate.write(tr)
1554
1495
1555 if poststatus:
1496 if poststatus:
1556 for ps in poststatus:
1497 for ps in poststatus:
1557 ps(self, status)
1498 ps(self, status)
1558 else:
1499 else:
1559 # in this case, writing changes out breaks
1500 # in this case, writing changes out breaks
1560 # consistency, because .hg/dirstate was
1501 # consistency, because .hg/dirstate was
1561 # already changed simultaneously after last
1502 # already changed simultaneously after last
1562 # caching (see also issue5584 for detail)
1503 # caching (see also issue5584 for detail)
1563 self._repo.ui.debug('skip updating dirstate: '
1504 self._repo.ui.debug('skip updating dirstate: '
1564 'identity mismatch\n')
1505 'identity mismatch\n')
1565 except error.LockError:
1506 except error.LockError:
1566 pass
1507 pass
1567 finally:
1508 finally:
1568 # Even if the wlock couldn't be grabbed, clear out the list.
1509 # Even if the wlock couldn't be grabbed, clear out the list.
1569 self._repo.clearpostdsstatus()
1510 self._repo.clearpostdsstatus()
1570
1511
1571 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1512 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1572 '''Gets the status from the dirstate -- internal use only.'''
1513 '''Gets the status from the dirstate -- internal use only.'''
1573 subrepos = []
1514 subrepos = []
1574 if '.hgsub' in self:
1515 if '.hgsub' in self:
1575 subrepos = sorted(self.substate)
1516 subrepos = sorted(self.substate)
1576 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1517 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1577 clean=clean, unknown=unknown)
1518 clean=clean, unknown=unknown)
1578
1519
1579 # check for any possibly clean files
1520 # check for any possibly clean files
1580 fixup = []
1521 fixup = []
1581 if cmp:
1522 if cmp:
1582 modified2, deleted2, fixup = self._checklookup(cmp)
1523 modified2, deleted2, fixup = self._checklookup(cmp)
1583 s.modified.extend(modified2)
1524 s.modified.extend(modified2)
1584 s.deleted.extend(deleted2)
1525 s.deleted.extend(deleted2)
1585
1526
1586 if fixup and clean:
1527 if fixup and clean:
1587 s.clean.extend(fixup)
1528 s.clean.extend(fixup)
1588
1529
1589 self._poststatusfixup(s, fixup)
1530 self._poststatusfixup(s, fixup)
1590
1531
1591 if match.always():
1532 if match.always():
1592 # cache for performance
1533 # cache for performance
1593 if s.unknown or s.ignored or s.clean:
1534 if s.unknown or s.ignored or s.clean:
1594 # "_status" is cached with list*=False in the normal route
1535 # "_status" is cached with list*=False in the normal route
1595 self._status = scmutil.status(s.modified, s.added, s.removed,
1536 self._status = scmutil.status(s.modified, s.added, s.removed,
1596 s.deleted, [], [], [])
1537 s.deleted, [], [], [])
1597 else:
1538 else:
1598 self._status = s
1539 self._status = s
1599
1540
1600 return s
1541 return s
1601
1542
1602 @propertycache
1543 @propertycache
1603 def _manifest(self):
1544 def _manifest(self):
1604 """generate a manifest corresponding to the values in self._status
1545 """generate a manifest corresponding to the values in self._status
1605
1546
1606 This reuses the file nodeids from the parent, but uses special node
1547 This reuses the file nodeids from the parent, but uses special node
1607 identifiers for added and modified files. This is used by manifest
1548 identifiers for added and modified files. This is used by manifest
1608 merge to see that files are different and by the update logic to avoid
1549 merge to see that files are different and by the update logic to avoid
1609 deleting newly added files.
1550 deleting newly added files.
1610 """
1551 """
1611 return self._buildstatusmanifest(self._status)
1552 return self._buildstatusmanifest(self._status)
1612
1553
1613 def _buildstatusmanifest(self, status):
1554 def _buildstatusmanifest(self, status):
1614 """Builds a manifest that includes the given status results."""
1555 """Builds a manifest that includes the given status results."""
1615 parents = self.parents()
1556 parents = self.parents()
1616
1557
1617 man = parents[0].manifest().copy()
1558 man = parents[0].manifest().copy()
1618
1559
1619 ff = self._flagfunc
1560 ff = self._flagfunc
1620 for i, l in ((addednodeid, status.added),
1561 for i, l in ((addednodeid, status.added),
1621 (modifiednodeid, status.modified)):
1562 (modifiednodeid, status.modified)):
1622 for f in l:
1563 for f in l:
1623 man[f] = i
1564 man[f] = i
1624 try:
1565 try:
1625 man.setflag(f, ff(f))
1566 man.setflag(f, ff(f))
1626 except OSError:
1567 except OSError:
1627 pass
1568 pass
1628
1569
1629 for f in status.deleted + status.removed:
1570 for f in status.deleted + status.removed:
1630 if f in man:
1571 if f in man:
1631 del man[f]
1572 del man[f]
1632
1573
1633 return man
1574 return man
1634
1575
1635 def _buildstatus(self, other, s, match, listignored, listclean,
1576 def _buildstatus(self, other, s, match, listignored, listclean,
1636 listunknown):
1577 listunknown):
1637 """build a status with respect to another context
1578 """build a status with respect to another context
1638
1579
1639 This includes logic for maintaining the fast path of status when
1580 This includes logic for maintaining the fast path of status when
1640 comparing the working directory against its parent, which is to skip
1581 comparing the working directory against its parent, which is to skip
1641 building a new manifest if self (working directory) is not comparing
1582 building a new manifest if self (working directory) is not comparing
1642 against its parent (repo['.']).
1583 against its parent (repo['.']).
1643 """
1584 """
1644 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1585 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1645 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1586 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1646 # might have accidentally ended up with the entire contents of the file
1587 # might have accidentally ended up with the entire contents of the file
1647 # they are supposed to be linking to.
1588 # they are supposed to be linking to.
1648 s.modified[:] = self._filtersuspectsymlink(s.modified)
1589 s.modified[:] = self._filtersuspectsymlink(s.modified)
1649 if other != self._repo['.']:
1590 if other != self._repo['.']:
1650 s = super(workingctx, self)._buildstatus(other, s, match,
1591 s = super(workingctx, self)._buildstatus(other, s, match,
1651 listignored, listclean,
1592 listignored, listclean,
1652 listunknown)
1593 listunknown)
1653 return s
1594 return s
1654
1595
1655 def _matchstatus(self, other, match):
1596 def _matchstatus(self, other, match):
1656 """override the match method with a filter for directory patterns
1597 """override the match method with a filter for directory patterns
1657
1598
1658 We use inheritance to customize the match.bad method only in cases of
1599 We use inheritance to customize the match.bad method only in cases of
1659 workingctx since it belongs only to the working directory when
1600 workingctx since it belongs only to the working directory when
1660 comparing against the parent changeset.
1601 comparing against the parent changeset.
1661
1602
1662 If we aren't comparing against the working directory's parent, then we
1603 If we aren't comparing against the working directory's parent, then we
1663 just use the default match object sent to us.
1604 just use the default match object sent to us.
1664 """
1605 """
1665 if other != self._repo['.']:
1606 if other != self._repo['.']:
1666 def bad(f, msg):
1607 def bad(f, msg):
1667 # 'f' may be a directory pattern from 'match.files()',
1608 # 'f' may be a directory pattern from 'match.files()',
1668 # so 'f not in ctx1' is not enough
1609 # so 'f not in ctx1' is not enough
1669 if f not in other and not other.hasdir(f):
1610 if f not in other and not other.hasdir(f):
1670 self._repo.ui.warn('%s: %s\n' %
1611 self._repo.ui.warn('%s: %s\n' %
1671 (self._repo.dirstate.pathto(f), msg))
1612 (self._repo.dirstate.pathto(f), msg))
1672 match.bad = bad
1613 match.bad = bad
1673 return match
1614 return match
1674
1615
1675 def markcommitted(self, node):
1616 def markcommitted(self, node):
1676 super(workingctx, self).markcommitted(node)
1617 super(workingctx, self).markcommitted(node)
1677
1618
1678 sparse.aftercommit(self._repo, node)
1619 sparse.aftercommit(self._repo, node)
1679
1620
1680 class committablefilectx(basefilectx):
1621 class committablefilectx(basefilectx):
1681 """A committablefilectx provides common functionality for a file context
1622 """A committablefilectx provides common functionality for a file context
1682 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1623 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1683 def __init__(self, repo, path, filelog=None, ctx=None):
1624 def __init__(self, repo, path, filelog=None, ctx=None):
1684 self._repo = repo
1625 self._repo = repo
1685 self._path = path
1626 self._path = path
1686 self._changeid = None
1627 self._changeid = None
1687 self._filerev = self._filenode = None
1628 self._filerev = self._filenode = None
1688
1629
1689 if filelog is not None:
1630 if filelog is not None:
1690 self._filelog = filelog
1631 self._filelog = filelog
1691 if ctx:
1632 if ctx:
1692 self._changectx = ctx
1633 self._changectx = ctx
1693
1634
1694 def __nonzero__(self):
1635 def __nonzero__(self):
1695 return True
1636 return True
1696
1637
1697 __bool__ = __nonzero__
1638 __bool__ = __nonzero__
1698
1639
1699 def linkrev(self):
1640 def linkrev(self):
1700 # linked to self._changectx no matter if file is modified or not
1641 # linked to self._changectx no matter if file is modified or not
1701 return self.rev()
1642 return self.rev()
1702
1643
1703 def parents(self):
1644 def parents(self):
1704 '''return parent filectxs, following copies if necessary'''
1645 '''return parent filectxs, following copies if necessary'''
1705 def filenode(ctx, path):
1646 def filenode(ctx, path):
1706 return ctx._manifest.get(path, nullid)
1647 return ctx._manifest.get(path, nullid)
1707
1648
1708 path = self._path
1649 path = self._path
1709 fl = self._filelog
1650 fl = self._filelog
1710 pcl = self._changectx._parents
1651 pcl = self._changectx._parents
1711 renamed = self.renamed()
1652 renamed = self.renamed()
1712
1653
1713 if renamed:
1654 if renamed:
1714 pl = [renamed + (None,)]
1655 pl = [renamed + (None,)]
1715 else:
1656 else:
1716 pl = [(path, filenode(pcl[0], path), fl)]
1657 pl = [(path, filenode(pcl[0], path), fl)]
1717
1658
1718 for pc in pcl[1:]:
1659 for pc in pcl[1:]:
1719 pl.append((path, filenode(pc, path), fl))
1660 pl.append((path, filenode(pc, path), fl))
1720
1661
1721 return [self._parentfilectx(p, fileid=n, filelog=l)
1662 return [self._parentfilectx(p, fileid=n, filelog=l)
1722 for p, n, l in pl if n != nullid]
1663 for p, n, l in pl if n != nullid]
1723
1664
1724 def children(self):
1665 def children(self):
1725 return []
1666 return []
1726
1667
1727 class workingfilectx(committablefilectx):
1668 class workingfilectx(committablefilectx):
1728 """A workingfilectx object makes access to data related to a particular
1669 """A workingfilectx object makes access to data related to a particular
1729 file in the working directory convenient."""
1670 file in the working directory convenient."""
1730 def __init__(self, repo, path, filelog=None, workingctx=None):
1671 def __init__(self, repo, path, filelog=None, workingctx=None):
1731 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1672 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1732
1673
1733 @propertycache
1674 @propertycache
1734 def _changectx(self):
1675 def _changectx(self):
1735 return workingctx(self._repo)
1676 return workingctx(self._repo)
1736
1677
1737 def data(self):
1678 def data(self):
1738 return self._repo.wread(self._path)
1679 return self._repo.wread(self._path)
1739 def renamed(self):
1680 def renamed(self):
1740 rp = self._repo.dirstate.copied(self._path)
1681 rp = self._repo.dirstate.copied(self._path)
1741 if not rp:
1682 if not rp:
1742 return None
1683 return None
1743 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1684 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1744
1685
1745 def size(self):
1686 def size(self):
1746 return self._repo.wvfs.lstat(self._path).st_size
1687 return self._repo.wvfs.lstat(self._path).st_size
1747 def date(self):
1688 def date(self):
1748 t, tz = self._changectx.date()
1689 t, tz = self._changectx.date()
1749 try:
1690 try:
1750 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1691 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1751 except OSError as err:
1692 except OSError as err:
1752 if err.errno != errno.ENOENT:
1693 if err.errno != errno.ENOENT:
1753 raise
1694 raise
1754 return (t, tz)
1695 return (t, tz)
1755
1696
1756 def exists(self):
1697 def exists(self):
1757 return self._repo.wvfs.exists(self._path)
1698 return self._repo.wvfs.exists(self._path)
1758
1699
1759 def lexists(self):
1700 def lexists(self):
1760 return self._repo.wvfs.lexists(self._path)
1701 return self._repo.wvfs.lexists(self._path)
1761
1702
1762 def audit(self):
1703 def audit(self):
1763 return self._repo.wvfs.audit(self._path)
1704 return self._repo.wvfs.audit(self._path)
1764
1705
1765 def cmp(self, fctx):
1706 def cmp(self, fctx):
1766 """compare with other file context
1707 """compare with other file context
1767
1708
1768 returns True if different than fctx.
1709 returns True if different than fctx.
1769 """
1710 """
1770 # fctx should be a filectx (not a workingfilectx)
1711 # fctx should be a filectx (not a workingfilectx)
1771 # invert comparison to reuse the same code path
1712 # invert comparison to reuse the same code path
1772 return fctx.cmp(self)
1713 return fctx.cmp(self)
1773
1714
1774 def remove(self, ignoremissing=False):
1715 def remove(self, ignoremissing=False):
1775 """wraps unlink for a repo's working directory"""
1716 """wraps unlink for a repo's working directory"""
1776 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1717 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1777
1718
1778 def write(self, data, flags, backgroundclose=False, **kwargs):
1719 def write(self, data, flags, backgroundclose=False, **kwargs):
1779 """wraps repo.wwrite"""
1720 """wraps repo.wwrite"""
1780 self._repo.wwrite(self._path, data, flags,
1721 self._repo.wwrite(self._path, data, flags,
1781 backgroundclose=backgroundclose,
1722 backgroundclose=backgroundclose,
1782 **kwargs)
1723 **kwargs)
1783
1724
1784 def markcopied(self, src):
1725 def markcopied(self, src):
1785 """marks this file a copy of `src`"""
1726 """marks this file a copy of `src`"""
1786 if self._repo.dirstate[self._path] in "nma":
1727 if self._repo.dirstate[self._path] in "nma":
1787 self._repo.dirstate.copy(src, self._path)
1728 self._repo.dirstate.copy(src, self._path)
1788
1729
1789 def clearunknown(self):
1730 def clearunknown(self):
1790 """Removes conflicting items in the working directory so that
1731 """Removes conflicting items in the working directory so that
1791 ``write()`` can be called successfully.
1732 ``write()`` can be called successfully.
1792 """
1733 """
1793 wvfs = self._repo.wvfs
1734 wvfs = self._repo.wvfs
1794 f = self._path
1735 f = self._path
1795 wvfs.audit(f)
1736 wvfs.audit(f)
1796 if wvfs.isdir(f) and not wvfs.islink(f):
1737 if wvfs.isdir(f) and not wvfs.islink(f):
1797 wvfs.rmtree(f, forcibly=True)
1738 wvfs.rmtree(f, forcibly=True)
1798 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1739 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1799 for p in reversed(list(util.finddirs(f))):
1740 for p in reversed(list(util.finddirs(f))):
1800 if wvfs.isfileorlink(p):
1741 if wvfs.isfileorlink(p):
1801 wvfs.unlink(p)
1742 wvfs.unlink(p)
1802 break
1743 break
1803
1744
1804 def setflags(self, l, x):
1745 def setflags(self, l, x):
1805 self._repo.wvfs.setflags(self._path, l, x)
1746 self._repo.wvfs.setflags(self._path, l, x)
1806
1747
1807 class overlayworkingctx(committablectx):
1748 class overlayworkingctx(committablectx):
1808 """Wraps another mutable context with a write-back cache that can be
1749 """Wraps another mutable context with a write-back cache that can be
1809 converted into a commit context.
1750 converted into a commit context.
1810
1751
1811 self._cache[path] maps to a dict with keys: {
1752 self._cache[path] maps to a dict with keys: {
1812 'exists': bool?
1753 'exists': bool?
1813 'date': date?
1754 'date': date?
1814 'data': str?
1755 'data': str?
1815 'flags': str?
1756 'flags': str?
1816 'copied': str? (path or None)
1757 'copied': str? (path or None)
1817 }
1758 }
1818 If `exists` is True, `flags` must be non-None and `date` is non-None. If it
1759 If `exists` is True, `flags` must be non-None and `date` is non-None. If it
1819 is `False`, the file was deleted.
1760 is `False`, the file was deleted.
1820 """
1761 """
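# Illustrative example, not in the original source, of what one write-back
# cache entry described by the docstring above might look like after an
# in-memory write; the path, contents and flags are hypothetical:
#
#   self._cache[b'foo/bar.txt'] = {
#       'exists': True,
#       'flags': b'',
#       'copied': None,
#       'data': b'new contents\n',
#       'date': dateutil.makedate(),
#   }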

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, '_manifest')

    def data(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                if self._cache[path]['data']:
                    return self._cache[path]['data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        def f(path):
            return self._cache[path]['flags']
        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                self._existsinparent(f)]

    def added(self):
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                not self._existsinparent(f)]

    def removed(self):
        return [f for f in self._cache.keys() if
                not self._cache[f]['exists'] and self._existsinparent(f)]

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path]['date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        if self.isdirty(path):
            self._cache[path]['copied'] = origin
        else:
            raise error.ProgrammingError('markcopied() called on clean context')

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path]['copied']
        else:
            raise error.ProgrammingError('copydata() called on clean context')

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return self._cache[path]['flags']
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].flags()

    def _existsinparent(self, path):
        try:
            # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """
        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if 'l' in self.p1()[component].flags():
                raise error.Abort("error: %s conflicts with symlink %s "
                                  "in %s." % (path, component,
                                              self.p1().rev()))
            else:
                raise error.Abort("error: '%s' conflicts with file '%s' in "
                                  "%s." % (path, component,
                                           self.p1().rev()))

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split('/')
        for i in xrange(len(components)):
            component = "/".join(components[0:i])
            if component in self.p1():
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't have any paths matching `path/*`).
        match = matchmod.match('/', '', [path + '/'], default=b'relpath')
        matches = self.p1().manifest().matches(match)
        if len(matches) > 0:
            if len(matches) == 1 and matches.keys()[0] == path:
                return
            raise error.Abort("error: file '%s' cannot be written because "
                              " '%s/' is a folder in %s (containing %d "
                              "entries: %s)"
                              % (path, path, self.p1(), len(matches),
                                 ', '.join(matches.keys())))

    def write(self, path, data, flags='', **kwargs):
        if data is None:
            raise error.ProgrammingError("data must be non-None")
        self._auditconflicts(path)
        self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
                        flags=flags)

    def setflags(self, path, l, x):
        self._markdirty(path, exists=True, date=dateutil.makedate(),
                        flags=(l and 'l' or '') + (x and 'x' or ''))

    def remove(self, path):
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                    'l' in self._cache[path]['flags']):
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']

        return self._existsinparent(path)

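    # For example (illustrative, not part of the original code): a dirty cache
    # entry for 'lnk' whose flags contain 'l' and whose data is 'target' makes
    # ``exists('lnk')`` delegate to ``exists('target')``, so a dangling
    # in-memory symlink reports False here while ``lexists('lnk')`` stays True.
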
    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path]['exists']

        return self._existsinparent(path)

    def size(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return len(self._cache[path]['data'])
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        return self._wrappedctx[path].size()

    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped contexts' if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        files = self._cache.keys()
        def getfile(repo, memctx, path):
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)

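    # Illustrative sketch (added for clarity, not part of the original code)
    # of how an in-memory merge/rebase might drive this class, assuming a
    # ``repo`` and a base changectx ``basectx`` are available:
    #     wctx = overlayworkingctx(repo)
    #     wctx.setbase(basectx)
    #     wctx.write('path/to/file', 'data\n')
    #     mctx = wctx.tomemctx('commit message')
    #     repo.commitctx(mctx)
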
    def isdirty(self, path):
        return path in self._cache

    def isempty(self):
        # We need to discard any keys that are actually clean before the empty
        # commit check.
        self._compact()
        return len(self._cache) == 0

    def clean(self):
        self._cache = {}

    def _compact(self):
        """Removes keys from the cache that are actually clean, by comparing
        them with the underlying context.

        This can occur during the merge process, e.g. by passing --tool :local
        to resolve a conflict.
        """
        keys = []
        for path in self._cache.keys():
            cache = self._cache[path]
            try:
                underlying = self._wrappedctx[path]
                if (underlying.data() == cache['data'] and
                        underlying.flags() == cache['flags']):
                    keys.append(path)
            except error.ManifestLookupError:
                # Path not in the underlying manifest (created).
                continue

        for path in keys:
            del self._cache[path]
        return keys

    def _markdirty(self, path, exists, data=None, date=None, flags=''):
        self._cache[path] = {
            'exists': exists,
            'data': data,
            'date': date,
            'flags': flags,
            'copied': None,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(self._repo, path, parent=self,
                                     filelog=filelog)

class overlayworkingfilectx(committablefilectx):
    """Wraps a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        return self.data() != fctx.data()

    def changectx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def renamed(self):
        path = self._parent.copydata(self._path)
        if not path:
            return None
        return path, self._changectx._parents[0]._manifest.get(path, nullid)

    def size(self):
        return self._parent.size(self._path)

    def markcopied(self, origin):
        self._parent.markcopied(self._path, origin)

    def audit(self):
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        return self._parent.write(self._path, data, flags, **kwargs)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)

    def clearunknown(self):
        pass

class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed

def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        if path not in cache:
            cache[path] = func(repo, memctx, path)
        return cache[path]

    return getfilectx

def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # this is weird but apparently we only keep track of one parent
        # (why not only store that instead of a tuple?)
        copied = fctx.renamed()
        if copied:
            copied = copied[0]
        return memfilectx(repo, memctx, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copied)

    return getfilectx

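# Note (added for illustration): callers rarely need memfilefromctx() directly;
# memctx.__init__ below wraps any non-callable ``filectxfn`` (e.g. a changectx)
# with it, so something like ``memctx(repo, parents, text, files, repo['.'])``
# also works.
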
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, memctx, path, data, islink=islink,
                          isexec=isexec, copied=copied)

    return getfilectx

class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time, while
    related file data is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revision identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    the requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but the order of calls is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn returns None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to the current
    repository username, date is the commit date in any format
    supported by dateutil.parsedate() and defaults to the current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [self._repo[p] for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, changectx, path, data, islink=False,
                 isexec=False, copied=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, changectx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags, **kwargs):
        """wraps repo.wwrite"""
        self._data = data

class overlayfilectx(committablefilectx):
    """Like memfilectx but takes an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    the flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend of a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr_ in attrs:
                if util.safehasattr(originalfctx, attr_):
                    setattr(self, attr_, getattr(originalfctx, attr_))

    def data(self):
        return self._datafunc()

class metadataonlyctx(committablectx):
    """Like memctx but it reuses the manifest of a different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is the original revision whose manifest we're
    reusing, 'parents' is a sequence of two parent revision identifiers (pass
    None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    dateutil.parsedate() and defaults to the current date, extra is a dictionary
    of metadata or is left empty.
    """
    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        assert not flags
        with open(self._path, "w") as f:
            f.write(data)
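
# Illustrative sketch (added for clarity, not part of the original code):
# arbitraryfilectx lets merge tooling compare an on-disk file that is not
# tracked in the working directory, e.g.
#     afctx = arbitraryfilectx('/tmp/merge-base', repo=repo)
#     differs = afctx.cmp(repo[None]['some/file'])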