context: clarify the various modes in the filesremoved method
Author: marmoute
Changeset: r43292:15badd62 (default branch)
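
The rewrite in this changeset spells out the three values of the experimental.copies.read-from config instead of folding them into one compound conditional. As a rough, standalone sketch of the dispatch the new method performs (the names changeset_value and compute_from_filelogs are illustrative stand-ins for self._changeset.filesremoved and scmutil.computechangesetfilesremoved(self); this is not the code being committed):

    def resolve_filesremoved(source, changeset_value, compute_from_filelogs):
        # 'changeset-only': trust the changeset data; nothing recorded means
        # "no files removed"
        if source == 'changeset-only':
            return changeset_value if changeset_value is not None else []
        # 'compatibility': prefer changeset data, fall back to the filelogs
        # when nothing was recorded
        if source == 'compatibility':
            if changeset_value is not None:
                return changeset_value
            return compute_from_filelogs()
        # any other value: ignore changeset data and recompute from filelogs
        return compute_from_filelogs()

For instance, in compatibility mode with no removal data stored in the changeset, the sketch falls through to the filelog computation, which corresponds to the elif branch added on new lines 476-478 of the hunk below.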
@@ -1,2587 +1,2592 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 hex,
18 hex,
19 modifiednodeid,
19 modifiednodeid,
20 nullid,
20 nullid,
21 nullrev,
21 nullrev,
22 short,
22 short,
23 wdirfilenodeids,
23 wdirfilenodeids,
24 wdirhex,
24 wdirhex,
25 )
25 )
26 from . import (
26 from . import (
27 copies,
27 copies,
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 obsolete as obsmod,
33 obsolete as obsmod,
34 patch,
34 patch,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 repoview,
38 repoview,
39 scmutil,
39 scmutil,
40 sparse,
40 sparse,
41 subrepo,
41 subrepo,
42 subrepoutil,
42 subrepoutil,
43 util,
43 util,
44 )
44 )
45 from .utils import (
45 from .utils import (
46 dateutil,
46 dateutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 propertycache = util.propertycache
50 propertycache = util.propertycache
51
51
52 class basectx(object):
52 class basectx(object):
53 """A basectx object represents the common logic for its children:
53 """A basectx object represents the common logic for its children:
54 changectx: read-only context that is already present in the repo,
54 changectx: read-only context that is already present in the repo,
55 workingctx: a context that represents the working directory and can
55 workingctx: a context that represents the working directory and can
56 be committed,
56 be committed,
57 memctx: a context that represents changes in-memory and can also
57 memctx: a context that represents changes in-memory and can also
58 be committed."""
58 be committed."""
59
59
60 def __init__(self, repo):
60 def __init__(self, repo):
61 self._repo = repo
61 self._repo = repo
62
62
63 def __bytes__(self):
63 def __bytes__(self):
64 return short(self.node())
64 return short(self.node())
65
65
66 __str__ = encoding.strmethod(__bytes__)
66 __str__ = encoding.strmethod(__bytes__)
67
67
68 def __repr__(self):
68 def __repr__(self):
69 return r"<%s %s>" % (type(self).__name__, str(self))
69 return r"<%s %s>" % (type(self).__name__, str(self))
70
70
71 def __eq__(self, other):
71 def __eq__(self, other):
72 try:
72 try:
73 return type(self) == type(other) and self._rev == other._rev
73 return type(self) == type(other) and self._rev == other._rev
74 except AttributeError:
74 except AttributeError:
75 return False
75 return False
76
76
77 def __ne__(self, other):
77 def __ne__(self, other):
78 return not (self == other)
78 return not (self == other)
79
79
80 def __contains__(self, key):
80 def __contains__(self, key):
81 return key in self._manifest
81 return key in self._manifest
82
82
83 def __getitem__(self, key):
83 def __getitem__(self, key):
84 return self.filectx(key)
84 return self.filectx(key)
85
85
86 def __iter__(self):
86 def __iter__(self):
87 return iter(self._manifest)
87 return iter(self._manifest)
88
88
89 def _buildstatusmanifest(self, status):
89 def _buildstatusmanifest(self, status):
90 """Builds a manifest that includes the given status results, if this is
90 """Builds a manifest that includes the given status results, if this is
91 a working copy context. For non-working copy contexts, it just returns
91 a working copy context. For non-working copy contexts, it just returns
92 the normal manifest."""
92 the normal manifest."""
93 return self.manifest()
93 return self.manifest()
94
94
95 def _matchstatus(self, other, match):
95 def _matchstatus(self, other, match):
96 """This internal method provides a way for child objects to override the
96 """This internal method provides a way for child objects to override the
97 match operator.
97 match operator.
98 """
98 """
99 return match
99 return match
100
100
101 def _buildstatus(self, other, s, match, listignored, listclean,
101 def _buildstatus(self, other, s, match, listignored, listclean,
102 listunknown):
102 listunknown):
103 """build a status with respect to another context"""
103 """build a status with respect to another context"""
104 # Load earliest manifest first for caching reasons. More specifically,
104 # Load earliest manifest first for caching reasons. More specifically,
105 # if you have revisions 1000 and 1001, 1001 is probably stored as a
105 # if you have revisions 1000 and 1001, 1001 is probably stored as a
106 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
106 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
107 # 1000 and cache it so that when you read 1001, we just need to apply a
107 # 1000 and cache it so that when you read 1001, we just need to apply a
108 # delta to what's in the cache. So that's one full reconstruction + one
108 # delta to what's in the cache. So that's one full reconstruction + one
109 # delta application.
109 # delta application.
110 mf2 = None
110 mf2 = None
111 if self.rev() is not None and self.rev() < other.rev():
111 if self.rev() is not None and self.rev() < other.rev():
112 mf2 = self._buildstatusmanifest(s)
112 mf2 = self._buildstatusmanifest(s)
113 mf1 = other._buildstatusmanifest(s)
113 mf1 = other._buildstatusmanifest(s)
114 if mf2 is None:
114 if mf2 is None:
115 mf2 = self._buildstatusmanifest(s)
115 mf2 = self._buildstatusmanifest(s)
116
116
117 modified, added = [], []
117 modified, added = [], []
118 removed = []
118 removed = []
119 clean = []
119 clean = []
120 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
120 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
121 deletedset = set(deleted)
121 deletedset = set(deleted)
122 d = mf1.diff(mf2, match=match, clean=listclean)
122 d = mf1.diff(mf2, match=match, clean=listclean)
123 for fn, value in d.iteritems():
123 for fn, value in d.iteritems():
124 if fn in deletedset:
124 if fn in deletedset:
125 continue
125 continue
126 if value is None:
126 if value is None:
127 clean.append(fn)
127 clean.append(fn)
128 continue
128 continue
129 (node1, flag1), (node2, flag2) = value
129 (node1, flag1), (node2, flag2) = value
130 if node1 is None:
130 if node1 is None:
131 added.append(fn)
131 added.append(fn)
132 elif node2 is None:
132 elif node2 is None:
133 removed.append(fn)
133 removed.append(fn)
134 elif flag1 != flag2:
134 elif flag1 != flag2:
135 modified.append(fn)
135 modified.append(fn)
136 elif node2 not in wdirfilenodeids:
136 elif node2 not in wdirfilenodeids:
137 # When comparing files between two commits, we save time by
137 # When comparing files between two commits, we save time by
138 # not comparing the file contents when the nodeids differ.
138 # not comparing the file contents when the nodeids differ.
139 # Note that this means we incorrectly report a reverted change
139 # Note that this means we incorrectly report a reverted change
140 # to a file as a modification.
140 # to a file as a modification.
141 modified.append(fn)
141 modified.append(fn)
142 elif self[fn].cmp(other[fn]):
142 elif self[fn].cmp(other[fn]):
143 modified.append(fn)
143 modified.append(fn)
144 else:
144 else:
145 clean.append(fn)
145 clean.append(fn)
146
146
147 if removed:
147 if removed:
148 # need to filter files if they are already reported as removed
148 # need to filter files if they are already reported as removed
149 unknown = [fn for fn in unknown if fn not in mf1 and
149 unknown = [fn for fn in unknown if fn not in mf1 and
150 (not match or match(fn))]
150 (not match or match(fn))]
151 ignored = [fn for fn in ignored if fn not in mf1 and
151 ignored = [fn for fn in ignored if fn not in mf1 and
152 (not match or match(fn))]
152 (not match or match(fn))]
153 # if they're deleted, don't report them as removed
153 # if they're deleted, don't report them as removed
154 removed = [fn for fn in removed if fn not in deletedset]
154 removed = [fn for fn in removed if fn not in deletedset]
155
155
156 return scmutil.status(modified, added, removed, deleted, unknown,
156 return scmutil.status(modified, added, removed, deleted, unknown,
157 ignored, clean)
157 ignored, clean)
158
158
159 @propertycache
159 @propertycache
160 def substate(self):
160 def substate(self):
161 return subrepoutil.state(self, self._repo.ui)
161 return subrepoutil.state(self, self._repo.ui)
162
162
163 def subrev(self, subpath):
163 def subrev(self, subpath):
164 return self.substate[subpath][1]
164 return self.substate[subpath][1]
165
165
166 def rev(self):
166 def rev(self):
167 return self._rev
167 return self._rev
168 def node(self):
168 def node(self):
169 return self._node
169 return self._node
170 def hex(self):
170 def hex(self):
171 return hex(self.node())
171 return hex(self.node())
172 def manifest(self):
172 def manifest(self):
173 return self._manifest
173 return self._manifest
174 def manifestctx(self):
174 def manifestctx(self):
175 return self._manifestctx
175 return self._manifestctx
176 def repo(self):
176 def repo(self):
177 return self._repo
177 return self._repo
178 def phasestr(self):
178 def phasestr(self):
179 return phases.phasenames[self.phase()]
179 return phases.phasenames[self.phase()]
180 def mutable(self):
180 def mutable(self):
181 return self.phase() > phases.public
181 return self.phase() > phases.public
182
182
183 def matchfileset(self, expr, badfn=None):
183 def matchfileset(self, expr, badfn=None):
184 return fileset.match(self, expr, badfn=badfn)
184 return fileset.match(self, expr, badfn=badfn)
185
185
186 def obsolete(self):
186 def obsolete(self):
187 """True if the changeset is obsolete"""
187 """True if the changeset is obsolete"""
188 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
188 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
189
189
190 def extinct(self):
190 def extinct(self):
191 """True if the changeset is extinct"""
191 """True if the changeset is extinct"""
192 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
192 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
193
193
194 def orphan(self):
194 def orphan(self):
195 """True if the changeset is not obsolete, but its ancestor is"""
195 """True if the changeset is not obsolete, but its ancestor is"""
196 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
196 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
197
197
198 def phasedivergent(self):
198 def phasedivergent(self):
199 """True if the changeset tries to be a successor of a public changeset
199 """True if the changeset tries to be a successor of a public changeset
200
200
201 Only non-public and non-obsolete changesets may be phase-divergent.
201 Only non-public and non-obsolete changesets may be phase-divergent.
202 """
202 """
203 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
203 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
204
204
205 def contentdivergent(self):
205 def contentdivergent(self):
206 """Is a successor of a changeset with multiple possible successor sets
206 """Is a successor of a changeset with multiple possible successor sets
207
207
208 Only non-public and non-obsolete changesets may be content-divergent.
208 Only non-public and non-obsolete changesets may be content-divergent.
209 """
209 """
210 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
210 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
211
211
212 def isunstable(self):
212 def isunstable(self):
213 """True if the changeset is either orphan, phase-divergent or
213 """True if the changeset is either orphan, phase-divergent or
214 content-divergent"""
214 content-divergent"""
215 return self.orphan() or self.phasedivergent() or self.contentdivergent()
215 return self.orphan() or self.phasedivergent() or self.contentdivergent()
216
216
217 def instabilities(self):
217 def instabilities(self):
218 """return the list of instabilities affecting this changeset.
218 """return the list of instabilities affecting this changeset.
219
219
220 Instabilities are returned as strings. possible values are:
220 Instabilities are returned as strings. possible values are:
221 - orphan,
221 - orphan,
222 - phase-divergent,
222 - phase-divergent,
223 - content-divergent.
223 - content-divergent.
224 """
224 """
225 instabilities = []
225 instabilities = []
226 if self.orphan():
226 if self.orphan():
227 instabilities.append('orphan')
227 instabilities.append('orphan')
228 if self.phasedivergent():
228 if self.phasedivergent():
229 instabilities.append('phase-divergent')
229 instabilities.append('phase-divergent')
230 if self.contentdivergent():
230 if self.contentdivergent():
231 instabilities.append('content-divergent')
231 instabilities.append('content-divergent')
232 return instabilities
232 return instabilities
233
233
234 def parents(self):
234 def parents(self):
235 """return contexts for each parent changeset"""
235 """return contexts for each parent changeset"""
236 return self._parents
236 return self._parents
237
237
238 def p1(self):
238 def p1(self):
239 return self._parents[0]
239 return self._parents[0]
240
240
241 def p2(self):
241 def p2(self):
242 parents = self._parents
242 parents = self._parents
243 if len(parents) == 2:
243 if len(parents) == 2:
244 return parents[1]
244 return parents[1]
245 return self._repo[nullrev]
245 return self._repo[nullrev]
246
246
247 def _fileinfo(self, path):
247 def _fileinfo(self, path):
248 if r'_manifest' in self.__dict__:
248 if r'_manifest' in self.__dict__:
249 try:
249 try:
250 return self._manifest[path], self._manifest.flags(path)
250 return self._manifest[path], self._manifest.flags(path)
251 except KeyError:
251 except KeyError:
252 raise error.ManifestLookupError(self._node, path,
252 raise error.ManifestLookupError(self._node, path,
253 _('not found in manifest'))
253 _('not found in manifest'))
254 if r'_manifestdelta' in self.__dict__ or path in self.files():
254 if r'_manifestdelta' in self.__dict__ or path in self.files():
255 if path in self._manifestdelta:
255 if path in self._manifestdelta:
256 return (self._manifestdelta[path],
256 return (self._manifestdelta[path],
257 self._manifestdelta.flags(path))
257 self._manifestdelta.flags(path))
258 mfl = self._repo.manifestlog
258 mfl = self._repo.manifestlog
259 try:
259 try:
260 node, flag = mfl[self._changeset.manifest].find(path)
260 node, flag = mfl[self._changeset.manifest].find(path)
261 except KeyError:
261 except KeyError:
262 raise error.ManifestLookupError(self._node, path,
262 raise error.ManifestLookupError(self._node, path,
263 _('not found in manifest'))
263 _('not found in manifest'))
264
264
265 return node, flag
265 return node, flag
266
266
267 def filenode(self, path):
267 def filenode(self, path):
268 return self._fileinfo(path)[0]
268 return self._fileinfo(path)[0]
269
269
270 def flags(self, path):
270 def flags(self, path):
271 try:
271 try:
272 return self._fileinfo(path)[1]
272 return self._fileinfo(path)[1]
273 except error.LookupError:
273 except error.LookupError:
274 return ''
274 return ''
275
275
276 @propertycache
276 @propertycache
277 def _copies(self):
277 def _copies(self):
278 return copies.computechangesetcopies(self)
278 return copies.computechangesetcopies(self)
279 def p1copies(self):
279 def p1copies(self):
280 return self._copies[0]
280 return self._copies[0]
281 def p2copies(self):
281 def p2copies(self):
282 return self._copies[1]
282 return self._copies[1]
283
283
284 def sub(self, path, allowcreate=True):
284 def sub(self, path, allowcreate=True):
285 '''return a subrepo for the stored revision of path, never wdir()'''
285 '''return a subrepo for the stored revision of path, never wdir()'''
286 return subrepo.subrepo(self, path, allowcreate=allowcreate)
286 return subrepo.subrepo(self, path, allowcreate=allowcreate)
287
287
288 def nullsub(self, path, pctx):
288 def nullsub(self, path, pctx):
289 return subrepo.nullsubrepo(self, path, pctx)
289 return subrepo.nullsubrepo(self, path, pctx)
290
290
291 def workingsub(self, path):
291 def workingsub(self, path):
292 '''return a subrepo for the stored revision, or wdir if this is a wdir
292 '''return a subrepo for the stored revision, or wdir if this is a wdir
293 context.
293 context.
294 '''
294 '''
295 return subrepo.subrepo(self, path, allowwdir=True)
295 return subrepo.subrepo(self, path, allowwdir=True)
296
296
297 def match(self, pats=None, include=None, exclude=None, default='glob',
297 def match(self, pats=None, include=None, exclude=None, default='glob',
298 listsubrepos=False, badfn=None):
298 listsubrepos=False, badfn=None):
299 r = self._repo
299 r = self._repo
300 return matchmod.match(r.root, r.getcwd(), pats,
300 return matchmod.match(r.root, r.getcwd(), pats,
301 include, exclude, default,
301 include, exclude, default,
302 auditor=r.nofsauditor, ctx=self,
302 auditor=r.nofsauditor, ctx=self,
303 listsubrepos=listsubrepos, badfn=badfn)
303 listsubrepos=listsubrepos, badfn=badfn)
304
304
305 def diff(self, ctx2=None, match=None, changes=None, opts=None,
305 def diff(self, ctx2=None, match=None, changes=None, opts=None,
306 losedatafn=None, pathfn=None, copy=None,
306 losedatafn=None, pathfn=None, copy=None,
307 copysourcematch=None, hunksfilterfn=None):
307 copysourcematch=None, hunksfilterfn=None):
308 """Returns a diff generator for the given contexts and matcher"""
308 """Returns a diff generator for the given contexts and matcher"""
309 if ctx2 is None:
309 if ctx2 is None:
310 ctx2 = self.p1()
310 ctx2 = self.p1()
311 if ctx2 is not None:
311 if ctx2 is not None:
312 ctx2 = self._repo[ctx2]
312 ctx2 = self._repo[ctx2]
313 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
313 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
314 opts=opts, losedatafn=losedatafn, pathfn=pathfn,
314 opts=opts, losedatafn=losedatafn, pathfn=pathfn,
315 copy=copy, copysourcematch=copysourcematch,
315 copy=copy, copysourcematch=copysourcematch,
316 hunksfilterfn=hunksfilterfn)
316 hunksfilterfn=hunksfilterfn)
317
317
318 def dirs(self):
318 def dirs(self):
319 return self._manifest.dirs()
319 return self._manifest.dirs()
320
320
321 def hasdir(self, dir):
321 def hasdir(self, dir):
322 return self._manifest.hasdir(dir)
322 return self._manifest.hasdir(dir)
323
323
324 def status(self, other=None, match=None, listignored=False,
324 def status(self, other=None, match=None, listignored=False,
325 listclean=False, listunknown=False, listsubrepos=False):
325 listclean=False, listunknown=False, listsubrepos=False):
326 """return status of files between two nodes or node and working
326 """return status of files between two nodes or node and working
327 directory.
327 directory.
328
328
329 If other is None, compare this node with working directory.
329 If other is None, compare this node with working directory.
330
330
331 returns (modified, added, removed, deleted, unknown, ignored, clean)
331 returns (modified, added, removed, deleted, unknown, ignored, clean)
332 """
332 """
333
333
334 ctx1 = self
334 ctx1 = self
335 ctx2 = self._repo[other]
335 ctx2 = self._repo[other]
336
336
337 # This next code block is, admittedly, fragile logic that tests for
337 # This next code block is, admittedly, fragile logic that tests for
338 # reversing the contexts and wouldn't need to exist if it weren't for
338 # reversing the contexts and wouldn't need to exist if it weren't for
339 # the fast (and common) code path of comparing the working directory
339 # the fast (and common) code path of comparing the working directory
340 # with its first parent.
340 # with its first parent.
341 #
341 #
342 # What we're aiming for here is the ability to call:
342 # What we're aiming for here is the ability to call:
343 #
343 #
344 # workingctx.status(parentctx)
344 # workingctx.status(parentctx)
345 #
345 #
346 # If we always built the manifest for each context and compared those,
346 # If we always built the manifest for each context and compared those,
347 # then we'd be done. But the special case of the above call means we
347 # then we'd be done. But the special case of the above call means we
348 # just copy the manifest of the parent.
348 # just copy the manifest of the parent.
349 reversed = False
349 reversed = False
350 if (not isinstance(ctx1, changectx)
350 if (not isinstance(ctx1, changectx)
351 and isinstance(ctx2, changectx)):
351 and isinstance(ctx2, changectx)):
352 reversed = True
352 reversed = True
353 ctx1, ctx2 = ctx2, ctx1
353 ctx1, ctx2 = ctx2, ctx1
354
354
355 match = self._repo.narrowmatch(match)
355 match = self._repo.narrowmatch(match)
356 match = ctx2._matchstatus(ctx1, match)
356 match = ctx2._matchstatus(ctx1, match)
357 r = scmutil.status([], [], [], [], [], [], [])
357 r = scmutil.status([], [], [], [], [], [], [])
358 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
358 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
359 listunknown)
359 listunknown)
360
360
361 if reversed:
361 if reversed:
362 # Reverse added and removed. Clear deleted, unknown and ignored as
362 # Reverse added and removed. Clear deleted, unknown and ignored as
363 # these make no sense to reverse.
363 # these make no sense to reverse.
364 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
364 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
365 r.clean)
365 r.clean)
366
366
367 if listsubrepos:
367 if listsubrepos:
368 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
368 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
369 try:
369 try:
370 rev2 = ctx2.subrev(subpath)
370 rev2 = ctx2.subrev(subpath)
371 except KeyError:
371 except KeyError:
372 # A subrepo that existed in node1 was deleted between
372 # A subrepo that existed in node1 was deleted between
373 # node1 and node2 (inclusive). Thus, ctx2's substate
373 # node1 and node2 (inclusive). Thus, ctx2's substate
374 # won't contain that subpath. The best we can do ignore it.
374 # won't contain that subpath. The best we can do is ignore it.
374 # won't contain that subpath. The best we can do is ignore it.
375 rev2 = None
376 submatch = matchmod.subdirmatcher(subpath, match)
376 submatch = matchmod.subdirmatcher(subpath, match)
377 s = sub.status(rev2, match=submatch, ignored=listignored,
377 s = sub.status(rev2, match=submatch, ignored=listignored,
378 clean=listclean, unknown=listunknown,
378 clean=listclean, unknown=listunknown,
379 listsubrepos=True)
379 listsubrepos=True)
380 for rfiles, sfiles in zip(r, s):
380 for rfiles, sfiles in zip(r, s):
381 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
381 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
382
382
383 for l in r:
383 for l in r:
384 l.sort()
384 l.sort()
385
385
386 return r
386 return r
387
387
388 class changectx(basectx):
388 class changectx(basectx):
389 """A changecontext object makes access to data related to a particular
389 """A changecontext object makes access to data related to a particular
390 changeset convenient. It represents a read-only context already present in
390 changeset convenient. It represents a read-only context already present in
391 the repo."""
391 the repo."""
392 def __init__(self, repo, rev, node):
392 def __init__(self, repo, rev, node):
393 super(changectx, self).__init__(repo)
393 super(changectx, self).__init__(repo)
394 self._rev = rev
394 self._rev = rev
395 self._node = node
395 self._node = node
396
396
397 def __hash__(self):
397 def __hash__(self):
398 try:
398 try:
399 return hash(self._rev)
399 return hash(self._rev)
400 except AttributeError:
400 except AttributeError:
401 return id(self)
401 return id(self)
402
402
403 def __nonzero__(self):
403 def __nonzero__(self):
404 return self._rev != nullrev
404 return self._rev != nullrev
405
405
406 __bool__ = __nonzero__
406 __bool__ = __nonzero__
407
407
408 @propertycache
408 @propertycache
409 def _changeset(self):
409 def _changeset(self):
410 return self._repo.changelog.changelogrevision(self.rev())
410 return self._repo.changelog.changelogrevision(self.rev())
411
411
412 @propertycache
412 @propertycache
413 def _manifest(self):
413 def _manifest(self):
414 return self._manifestctx.read()
414 return self._manifestctx.read()
415
415
416 @property
416 @property
417 def _manifestctx(self):
417 def _manifestctx(self):
418 return self._repo.manifestlog[self._changeset.manifest]
418 return self._repo.manifestlog[self._changeset.manifest]
419
419
420 @propertycache
420 @propertycache
421 def _manifestdelta(self):
421 def _manifestdelta(self):
422 return self._manifestctx.readdelta()
422 return self._manifestctx.readdelta()
423
423
424 @propertycache
424 @propertycache
425 def _parents(self):
425 def _parents(self):
426 repo = self._repo
426 repo = self._repo
427 p1, p2 = repo.changelog.parentrevs(self._rev)
427 p1, p2 = repo.changelog.parentrevs(self._rev)
428 if p2 == nullrev:
428 if p2 == nullrev:
429 return [repo[p1]]
429 return [repo[p1]]
430 return [repo[p1], repo[p2]]
430 return [repo[p1], repo[p2]]
431
431
432 def changeset(self):
432 def changeset(self):
433 c = self._changeset
433 c = self._changeset
434 return (
434 return (
435 c.manifest,
435 c.manifest,
436 c.user,
436 c.user,
437 c.date,
437 c.date,
438 c.files,
438 c.files,
439 c.description,
439 c.description,
440 c.extra,
440 c.extra,
441 )
441 )
442 def manifestnode(self):
442 def manifestnode(self):
443 return self._changeset.manifest
443 return self._changeset.manifest
444
444
445 def user(self):
445 def user(self):
446 return self._changeset.user
446 return self._changeset.user
447 def date(self):
447 def date(self):
448 return self._changeset.date
448 return self._changeset.date
449 def files(self):
449 def files(self):
450 return self._changeset.files
450 return self._changeset.files
451 def filesmodified(self):
451 def filesmodified(self):
452 modified = set(self.files())
452 modified = set(self.files())
453 modified.difference_update(self.filesadded())
453 modified.difference_update(self.filesadded())
454 modified.difference_update(self.filesremoved())
454 modified.difference_update(self.filesremoved())
455 return sorted(modified)
455 return sorted(modified)
456
456
457 def filesadded(self):
457 def filesadded(self):
458 source = self._repo.ui.config('experimental', 'copies.read-from')
458 source = self._repo.ui.config('experimental', 'copies.read-from')
459 filesadded = self._changeset.filesadded
459 filesadded = self._changeset.filesadded
460 if source == 'changeset-only':
460 if source == 'changeset-only':
461 if filesadded is None:
461 if filesadded is None:
462 filesadded = []
462 filesadded = []
463 elif source == 'compatibility':
463 elif source == 'compatibility':
464 if filesadded is None:
464 if filesadded is None:
465 filesadded = scmutil.computechangesetfilesadded(self)
465 filesadded = scmutil.computechangesetfilesadded(self)
466 else:
466 else:
467 filesadded = scmutil.computechangesetfilesadded(self)
467 filesadded = scmutil.computechangesetfilesadded(self)
468 return filesadded
468 return filesadded
469
469
 470     def filesremoved(self):
 471         source = self._repo.ui.config('experimental', 'copies.read-from')
-472         if (source == 'changeset-only' or
-473             (source == 'compatibility' and
-474              self._changeset.filesremoved is not None)):
-475             return self._changeset.filesremoved or []
-476         return scmutil.computechangesetfilesremoved(self)
+472         filesremoved = self._changeset.filesremoved
+473         if source == 'changeset-only':
+474             if filesremoved is None:
+475                 filesremoved = []
+476         elif source == 'compatibility':
+477             if filesremoved is None:
+478                 filesremoved = scmutil.computechangesetfilesremoved(self)
+479         else:
+480             filesremoved = scmutil.computechangesetfilesremoved(self)
+481         return filesremoved
477
482
478 @propertycache
483 @propertycache
479 def _copies(self):
484 def _copies(self):
480 source = self._repo.ui.config('experimental', 'copies.read-from')
485 source = self._repo.ui.config('experimental', 'copies.read-from')
481 p1copies = self._changeset.p1copies
486 p1copies = self._changeset.p1copies
482 p2copies = self._changeset.p2copies
487 p2copies = self._changeset.p2copies
483 # If config says to get copy metadata only from changeset, then return
488 # If config says to get copy metadata only from changeset, then return
484 # that, defaulting to {} if there was no copy metadata.
489 # that, defaulting to {} if there was no copy metadata.
485 # In compatibility mode, we return copy data from the changeset if
490 # In compatibility mode, we return copy data from the changeset if
486 # it was recorded there, and otherwise we fall back to getting it from
491 # it was recorded there, and otherwise we fall back to getting it from
487 # the filelogs (below).
492 # the filelogs (below).
488 if (source == 'changeset-only' or
493 if (source == 'changeset-only' or
489 (source == 'compatibility' and p1copies is not None)):
494 (source == 'compatibility' and p1copies is not None)):
490 return p1copies or {}, p2copies or {}
495 return p1copies or {}, p2copies or {}
491
496
492 # Otherwise (config said to read only from filelog, or we are in
497 # Otherwise (config said to read only from filelog, or we are in
493 # compatibility mode and there is no data in the changeset), we get
498 # compatibility mode and there is no data in the changeset), we get
494 # the copy metadata from the filelogs.
499 # the copy metadata from the filelogs.
495 return super(changectx, self)._copies
500 return super(changectx, self)._copies
496 def description(self):
501 def description(self):
497 return self._changeset.description
502 return self._changeset.description
498 def branch(self):
503 def branch(self):
499 return encoding.tolocal(self._changeset.extra.get("branch"))
504 return encoding.tolocal(self._changeset.extra.get("branch"))
500 def closesbranch(self):
505 def closesbranch(self):
501 return 'close' in self._changeset.extra
506 return 'close' in self._changeset.extra
502 def extra(self):
507 def extra(self):
503 """Return a dict of extra information."""
508 """Return a dict of extra information."""
504 return self._changeset.extra
509 return self._changeset.extra
505 def tags(self):
510 def tags(self):
506 """Return a list of byte tag names"""
511 """Return a list of byte tag names"""
507 return self._repo.nodetags(self._node)
512 return self._repo.nodetags(self._node)
508 def bookmarks(self):
513 def bookmarks(self):
509 """Return a list of byte bookmark names."""
514 """Return a list of byte bookmark names."""
510 return self._repo.nodebookmarks(self._node)
515 return self._repo.nodebookmarks(self._node)
511 def phase(self):
516 def phase(self):
512 return self._repo._phasecache.phase(self._repo, self._rev)
517 return self._repo._phasecache.phase(self._repo, self._rev)
513 def hidden(self):
518 def hidden(self):
514 return self._rev in repoview.filterrevs(self._repo, 'visible')
519 return self._rev in repoview.filterrevs(self._repo, 'visible')
515
520
516 def isinmemory(self):
521 def isinmemory(self):
517 return False
522 return False
518
523
519 def children(self):
524 def children(self):
520 """return list of changectx contexts for each child changeset.
525 """return list of changectx contexts for each child changeset.
521
526
522 This returns only the immediate child changesets. Use descendants() to
527 This returns only the immediate child changesets. Use descendants() to
523 recursively walk children.
528 recursively walk children.
524 """
529 """
525 c = self._repo.changelog.children(self._node)
530 c = self._repo.changelog.children(self._node)
526 return [self._repo[x] for x in c]
531 return [self._repo[x] for x in c]
527
532
528 def ancestors(self):
533 def ancestors(self):
529 for a in self._repo.changelog.ancestors([self._rev]):
534 for a in self._repo.changelog.ancestors([self._rev]):
530 yield self._repo[a]
535 yield self._repo[a]
531
536
532 def descendants(self):
537 def descendants(self):
533 """Recursively yield all children of the changeset.
538 """Recursively yield all children of the changeset.
534
539
535 For just the immediate children, use children()
540 For just the immediate children, use children()
536 """
541 """
537 for d in self._repo.changelog.descendants([self._rev]):
542 for d in self._repo.changelog.descendants([self._rev]):
538 yield self._repo[d]
543 yield self._repo[d]
539
544
540 def filectx(self, path, fileid=None, filelog=None):
545 def filectx(self, path, fileid=None, filelog=None):
541 """get a file context from this changeset"""
546 """get a file context from this changeset"""
542 if fileid is None:
547 if fileid is None:
543 fileid = self.filenode(path)
548 fileid = self.filenode(path)
544 return filectx(self._repo, path, fileid=fileid,
549 return filectx(self._repo, path, fileid=fileid,
545 changectx=self, filelog=filelog)
550 changectx=self, filelog=filelog)
546
551
547 def ancestor(self, c2, warn=False):
552 def ancestor(self, c2, warn=False):
548 """return the "best" ancestor context of self and c2
553 """return the "best" ancestor context of self and c2
549
554
550 If there are multiple candidates, it will show a message and check
555 If there are multiple candidates, it will show a message and check
551 merge.preferancestor configuration before falling back to the
556 merge.preferancestor configuration before falling back to the
552 revlog ancestor."""
557 revlog ancestor."""
553 # deal with workingctxs
558 # deal with workingctxs
554 n2 = c2._node
559 n2 = c2._node
555 if n2 is None:
560 if n2 is None:
556 n2 = c2._parents[0]._node
561 n2 = c2._parents[0]._node
557 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
562 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
558 if not cahs:
563 if not cahs:
559 anc = nullid
564 anc = nullid
560 elif len(cahs) == 1:
565 elif len(cahs) == 1:
561 anc = cahs[0]
566 anc = cahs[0]
562 else:
567 else:
563 # experimental config: merge.preferancestor
568 # experimental config: merge.preferancestor
564 for r in self._repo.ui.configlist('merge', 'preferancestor'):
569 for r in self._repo.ui.configlist('merge', 'preferancestor'):
565 try:
570 try:
566 ctx = scmutil.revsymbol(self._repo, r)
571 ctx = scmutil.revsymbol(self._repo, r)
567 except error.RepoLookupError:
572 except error.RepoLookupError:
568 continue
573 continue
569 anc = ctx.node()
574 anc = ctx.node()
570 if anc in cahs:
575 if anc in cahs:
571 break
576 break
572 else:
577 else:
573 anc = self._repo.changelog.ancestor(self._node, n2)
578 anc = self._repo.changelog.ancestor(self._node, n2)
574 if warn:
579 if warn:
575 self._repo.ui.status(
580 self._repo.ui.status(
576 (_("note: using %s as ancestor of %s and %s\n") %
581 (_("note: using %s as ancestor of %s and %s\n") %
577 (short(anc), short(self._node), short(n2))) +
582 (short(anc), short(self._node), short(n2))) +
578 ''.join(_(" alternatively, use --config "
583 ''.join(_(" alternatively, use --config "
579 "merge.preferancestor=%s\n") %
584 "merge.preferancestor=%s\n") %
580 short(n) for n in sorted(cahs) if n != anc))
585 short(n) for n in sorted(cahs) if n != anc))
581 return self._repo[anc]
586 return self._repo[anc]
582
587
583 def isancestorof(self, other):
588 def isancestorof(self, other):
584 """True if this changeset is an ancestor of other"""
589 """True if this changeset is an ancestor of other"""
585 return self._repo.changelog.isancestorrev(self._rev, other._rev)
590 return self._repo.changelog.isancestorrev(self._rev, other._rev)
586
591
587 def walk(self, match):
592 def walk(self, match):
588 '''Generates matching file names.'''
593 '''Generates matching file names.'''
589
594
590 # Wrap match.bad method to have message with nodeid
595 # Wrap match.bad method to have message with nodeid
591 def bad(fn, msg):
596 def bad(fn, msg):
592 # The manifest doesn't know about subrepos, so don't complain about
597 # The manifest doesn't know about subrepos, so don't complain about
593 # paths into valid subrepos.
598 # paths into valid subrepos.
594 if any(fn == s or fn.startswith(s + '/')
599 if any(fn == s or fn.startswith(s + '/')
595 for s in self.substate):
600 for s in self.substate):
596 return
601 return
597 match.bad(fn, _('no such file in rev %s') % self)
602 match.bad(fn, _('no such file in rev %s') % self)
598
603
599 m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
604 m = matchmod.badmatch(self._repo.narrowmatch(match), bad)
600 return self._manifest.walk(m)
605 return self._manifest.walk(m)
601
606
602 def matches(self, match):
607 def matches(self, match):
603 return self.walk(match)
608 return self.walk(match)
604
609
605 class basefilectx(object):
610 class basefilectx(object):
606 """A filecontext object represents the common logic for its children:
611 """A filecontext object represents the common logic for its children:
607 filectx: read-only access to a filerevision that is already present
612 filectx: read-only access to a filerevision that is already present
608 in the repo,
613 in the repo,
609 workingfilectx: a filecontext that represents files from the working
614 workingfilectx: a filecontext that represents files from the working
610 directory,
615 directory,
611 memfilectx: a filecontext that represents files in-memory,
616 memfilectx: a filecontext that represents files in-memory,
612 """
617 """
613 @propertycache
618 @propertycache
614 def _filelog(self):
619 def _filelog(self):
615 return self._repo.file(self._path)
620 return self._repo.file(self._path)
616
621
617 @propertycache
622 @propertycache
618 def _changeid(self):
623 def _changeid(self):
619 if r'_changectx' in self.__dict__:
624 if r'_changectx' in self.__dict__:
620 return self._changectx.rev()
625 return self._changectx.rev()
621 elif r'_descendantrev' in self.__dict__:
626 elif r'_descendantrev' in self.__dict__:
622 # this file context was created from a revision with a known
627 # this file context was created from a revision with a known
623 # descendant, we can (lazily) correct for linkrev aliases
628 # descendant, we can (lazily) correct for linkrev aliases
624 return self._adjustlinkrev(self._descendantrev)
629 return self._adjustlinkrev(self._descendantrev)
625 else:
630 else:
626 return self._filelog.linkrev(self._filerev)
631 return self._filelog.linkrev(self._filerev)
627
632
628 @propertycache
633 @propertycache
629 def _filenode(self):
634 def _filenode(self):
630 if r'_fileid' in self.__dict__:
635 if r'_fileid' in self.__dict__:
631 return self._filelog.lookup(self._fileid)
636 return self._filelog.lookup(self._fileid)
632 else:
637 else:
633 return self._changectx.filenode(self._path)
638 return self._changectx.filenode(self._path)
634
639
635 @propertycache
640 @propertycache
636 def _filerev(self):
641 def _filerev(self):
637 return self._filelog.rev(self._filenode)
642 return self._filelog.rev(self._filenode)
638
643
639 @propertycache
644 @propertycache
640 def _repopath(self):
645 def _repopath(self):
641 return self._path
646 return self._path
642
647
643 def __nonzero__(self):
648 def __nonzero__(self):
644 try:
649 try:
645 self._filenode
650 self._filenode
646 return True
651 return True
647 except error.LookupError:
652 except error.LookupError:
648 # file is missing
653 # file is missing
649 return False
654 return False
650
655
651 __bool__ = __nonzero__
656 __bool__ = __nonzero__
652
657
653 def __bytes__(self):
658 def __bytes__(self):
654 try:
659 try:
655 return "%s@%s" % (self.path(), self._changectx)
660 return "%s@%s" % (self.path(), self._changectx)
656 except error.LookupError:
661 except error.LookupError:
657 return "%s@???" % self.path()
662 return "%s@???" % self.path()
658
663
659 __str__ = encoding.strmethod(__bytes__)
664 __str__ = encoding.strmethod(__bytes__)
660
665
661 def __repr__(self):
666 def __repr__(self):
662 return r"<%s %s>" % (type(self).__name__, str(self))
667 return r"<%s %s>" % (type(self).__name__, str(self))
663
668
664 def __hash__(self):
669 def __hash__(self):
665 try:
670 try:
666 return hash((self._path, self._filenode))
671 return hash((self._path, self._filenode))
667 except AttributeError:
672 except AttributeError:
668 return id(self)
673 return id(self)
669
674
670 def __eq__(self, other):
675 def __eq__(self, other):
671 try:
676 try:
672 return (type(self) == type(other) and self._path == other._path
677 return (type(self) == type(other) and self._path == other._path
673 and self._filenode == other._filenode)
678 and self._filenode == other._filenode)
674 except AttributeError:
679 except AttributeError:
675 return False
680 return False
676
681
677 def __ne__(self, other):
682 def __ne__(self, other):
678 return not (self == other)
683 return not (self == other)
679
684
680 def filerev(self):
685 def filerev(self):
681 return self._filerev
686 return self._filerev
682 def filenode(self):
687 def filenode(self):
683 return self._filenode
688 return self._filenode
684 @propertycache
689 @propertycache
685 def _flags(self):
690 def _flags(self):
686 return self._changectx.flags(self._path)
691 return self._changectx.flags(self._path)
687 def flags(self):
692 def flags(self):
688 return self._flags
693 return self._flags
689 def filelog(self):
694 def filelog(self):
690 return self._filelog
695 return self._filelog
691 def rev(self):
696 def rev(self):
692 return self._changeid
697 return self._changeid
693 def linkrev(self):
698 def linkrev(self):
694 return self._filelog.linkrev(self._filerev)
699 return self._filelog.linkrev(self._filerev)
695 def node(self):
700 def node(self):
696 return self._changectx.node()
701 return self._changectx.node()
697 def hex(self):
702 def hex(self):
698 return self._changectx.hex()
703 return self._changectx.hex()
699 def user(self):
704 def user(self):
700 return self._changectx.user()
705 return self._changectx.user()
701 def date(self):
706 def date(self):
702 return self._changectx.date()
707 return self._changectx.date()
703 def files(self):
708 def files(self):
704 return self._changectx.files()
709 return self._changectx.files()
705 def description(self):
710 def description(self):
706 return self._changectx.description()
711 return self._changectx.description()
707 def branch(self):
712 def branch(self):
708 return self._changectx.branch()
713 return self._changectx.branch()
709 def extra(self):
714 def extra(self):
710 return self._changectx.extra()
715 return self._changectx.extra()
711 def phase(self):
716 def phase(self):
712 return self._changectx.phase()
717 return self._changectx.phase()
713 def phasestr(self):
718 def phasestr(self):
714 return self._changectx.phasestr()
719 return self._changectx.phasestr()
715 def obsolete(self):
720 def obsolete(self):
716 return self._changectx.obsolete()
721 return self._changectx.obsolete()
717 def instabilities(self):
722 def instabilities(self):
718 return self._changectx.instabilities()
723 return self._changectx.instabilities()
719 def manifest(self):
724 def manifest(self):
720 return self._changectx.manifest()
725 return self._changectx.manifest()
721 def changectx(self):
726 def changectx(self):
722 return self._changectx
727 return self._changectx
723 def renamed(self):
728 def renamed(self):
724 return self._copied
729 return self._copied
725 def copysource(self):
730 def copysource(self):
726 return self._copied and self._copied[0]
731 return self._copied and self._copied[0]
727 def repo(self):
732 def repo(self):
728 return self._repo
733 return self._repo
729 def size(self):
734 def size(self):
730 return len(self.data())
735 return len(self.data())
731
736
732 def path(self):
737 def path(self):
733 return self._path
738 return self._path
734
739
735 def isbinary(self):
740 def isbinary(self):
736 try:
741 try:
737 return stringutil.binary(self.data())
742 return stringutil.binary(self.data())
738 except IOError:
743 except IOError:
739 return False
744 return False
740 def isexec(self):
745 def isexec(self):
741 return 'x' in self.flags()
746 return 'x' in self.flags()
742 def islink(self):
747 def islink(self):
743 return 'l' in self.flags()
748 return 'l' in self.flags()
744
749
745 def isabsent(self):
750 def isabsent(self):
746 """whether this filectx represents a file not in self._changectx
751 """whether this filectx represents a file not in self._changectx
747
752
748 This is mainly for merge code to detect change/delete conflicts. This is
753 This is mainly for merge code to detect change/delete conflicts. This is
749 expected to be True for all subclasses of basectx."""
754 expected to be True for all subclasses of basectx."""
750 return False
755 return False
751
756
752 _customcmp = False
757 _customcmp = False
753 def cmp(self, fctx):
758 def cmp(self, fctx):
754 """compare with other file context
759 """compare with other file context
755
760
756 returns True if different than fctx.
761 returns True if different than fctx.
757 """
762 """
758 if fctx._customcmp:
763 if fctx._customcmp:
759 return fctx.cmp(self)
764 return fctx.cmp(self)
760
765
761 if self._filenode is None:
766 if self._filenode is None:
762 raise error.ProgrammingError(
767 raise error.ProgrammingError(
763 'filectx.cmp() must be reimplemented if not backed by revlog')
768 'filectx.cmp() must be reimplemented if not backed by revlog')
764
769
765 if fctx._filenode is None:
770 if fctx._filenode is None:
766 if self._repo._encodefilterpats:
771 if self._repo._encodefilterpats:
767 # can't rely on size() because wdir content may be decoded
772 # can't rely on size() because wdir content may be decoded
768 return self._filelog.cmp(self._filenode, fctx.data())
773 return self._filelog.cmp(self._filenode, fctx.data())
769 if self.size() - 4 == fctx.size():
774 if self.size() - 4 == fctx.size():
770 # size() can match:
775 # size() can match:
771 # if file data starts with '\1\n', empty metadata block is
776 # if file data starts with '\1\n', empty metadata block is
772 # prepended, which adds 4 bytes to filelog.size().
777 # prepended, which adds 4 bytes to filelog.size().
773 return self._filelog.cmp(self._filenode, fctx.data())
778 return self._filelog.cmp(self._filenode, fctx.data())
774 if self.size() == fctx.size():
779 if self.size() == fctx.size():
775 # size() matches: need to compare content
780 # size() matches: need to compare content
776 return self._filelog.cmp(self._filenode, fctx.data())
781 return self._filelog.cmp(self._filenode, fctx.data())
777
782
778 # size() differs
783 # size() differs
779 return True
784 return True
780
785
781 def _adjustlinkrev(self, srcrev, inclusive=False, stoprev=None):
786 def _adjustlinkrev(self, srcrev, inclusive=False, stoprev=None):
782 """return the first ancestor of <srcrev> introducing <fnode>
787 """return the first ancestor of <srcrev> introducing <fnode>
783
788
784 If the linkrev of the file revision does not point to an ancestor of
789 If the linkrev of the file revision does not point to an ancestor of
785 srcrev, we'll walk down the ancestors until we find one introducing
790 srcrev, we'll walk down the ancestors until we find one introducing
786 this file revision.
791 this file revision.
787
792
788 :srcrev: the changeset revision we search ancestors from
793 :srcrev: the changeset revision we search ancestors from
789 :inclusive: if true, the src revision will also be checked
794 :inclusive: if true, the src revision will also be checked
790 :stoprev: an optional revision to stop the walk at. If no introduction
795 :stoprev: an optional revision to stop the walk at. If no introduction
791 of this file content could be found before this floor
796 of this file content could be found before this floor
792 revision, the function will return "None" and stop its
797 revision, the function will return "None" and stop its
793 iteration.
798 iteration.
794 """
799 """
795 repo = self._repo
800 repo = self._repo
796 cl = repo.unfiltered().changelog
801 cl = repo.unfiltered().changelog
797 mfl = repo.manifestlog
802 mfl = repo.manifestlog
798 # fetch the linkrev
803 # fetch the linkrev
799 lkr = self.linkrev()
804 lkr = self.linkrev()
800 if srcrev == lkr:
805 if srcrev == lkr:
801 return lkr
806 return lkr
802 # hack to reuse ancestor computation when searching for renames
807 # hack to reuse ancestor computation when searching for renames
803 memberanc = getattr(self, '_ancestrycontext', None)
808 memberanc = getattr(self, '_ancestrycontext', None)
804 iteranc = None
809 iteranc = None
805 if srcrev is None:
810 if srcrev is None:
806 # wctx case, used by workingfilectx during mergecopy
811 # wctx case, used by workingfilectx during mergecopy
807 revs = [p.rev() for p in self._repo[None].parents()]
812 revs = [p.rev() for p in self._repo[None].parents()]
808 inclusive = True # we skipped the real (revless) source
813 inclusive = True # we skipped the real (revless) source
809 else:
814 else:
810 revs = [srcrev]
815 revs = [srcrev]
811 if memberanc is None:
816 if memberanc is None:
812 memberanc = iteranc = cl.ancestors(revs, lkr,
817 memberanc = iteranc = cl.ancestors(revs, lkr,
813 inclusive=inclusive)
818 inclusive=inclusive)
814 # check if this linkrev is an ancestor of srcrev
819 # check if this linkrev is an ancestor of srcrev
815 if lkr not in memberanc:
820 if lkr not in memberanc:
816 if iteranc is None:
821 if iteranc is None:
817 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
822 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
818 fnode = self._filenode
823 fnode = self._filenode
819 path = self._path
824 path = self._path
820 for a in iteranc:
825 for a in iteranc:
821 if stoprev is not None and a < stoprev:
826 if stoprev is not None and a < stoprev:
822 return None
827 return None
823 ac = cl.read(a) # get changeset data (we avoid object creation)
828 ac = cl.read(a) # get changeset data (we avoid object creation)
824 if path in ac[3]: # checking the 'files' field.
829 if path in ac[3]: # checking the 'files' field.
825 # The file has been touched, check if the content is
830 # The file has been touched, check if the content is
826 # similar to the one we search for.
831 # similar to the one we search for.
827 if fnode == mfl[ac[0]].readfast().get(path):
832 if fnode == mfl[ac[0]].readfast().get(path):
828 return a
833 return a
829 # In theory, we should never get out of that loop without a result.
834 # In theory, we should never get out of that loop without a result.
830 # But if manifest uses a buggy file revision (not children of the
835 # But if manifest uses a buggy file revision (not children of the
831 # one it replaces) we could. Such a buggy situation will likely
836 # one it replaces) we could. Such a buggy situation will likely
832 # result in a crash somewhere else at some point.
837 # result in a crash somewhere else at some point.
833 return lkr
838 return lkr
834
839
835 def isintroducedafter(self, changelogrev):
840 def isintroducedafter(self, changelogrev):
836 """True if a filectx has been introduced after a given floor revision
841 """True if a filectx has been introduced after a given floor revision
837 """
842 """
838 if self.linkrev() >= changelogrev:
843 if self.linkrev() >= changelogrev:
839 return True
844 return True
840 introrev = self._introrev(stoprev=changelogrev)
845 introrev = self._introrev(stoprev=changelogrev)
841 if introrev is None:
846 if introrev is None:
842 return False
847 return False
843 return introrev >= changelogrev
848 return introrev >= changelogrev
844
849
845 def introrev(self):
850 def introrev(self):
846 """return the rev of the changeset which introduced this file revision
851 """return the rev of the changeset which introduced this file revision
847
852
848 This method is different from linkrev because it takes into account the
853 This method is different from linkrev because it takes into account the
849 changeset the filectx was created from. It ensures the returned
854 changeset the filectx was created from. It ensures the returned
850 revision is one of its ancestors. This prevents bugs from
855 revision is one of its ancestors. This prevents bugs from
851 'linkrev-shadowing' when a file revision is used by multiple
856 'linkrev-shadowing' when a file revision is used by multiple
852 changesets.
857 changesets.
853 """
858 """
854 return self._introrev()
859 return self._introrev()
855
860
856 def _introrev(self, stoprev=None):
861 def _introrev(self, stoprev=None):
857 """
862 """
858 Same as `introrev` but, with an extra argument to limit changelog
863 Same as `introrev` but, with an extra argument to limit changelog
859 iteration range in some internal usecase.
864 iteration range in some internal usecase.
860
865
861 If `stoprev` is set, the `introrev` will not be searched past that
866 If `stoprev` is set, the `introrev` will not be searched past that
862 `stoprev` revision and "None" might be returned. This is useful to
867 `stoprev` revision and "None" might be returned. This is useful to
863 limit the iteration range.
868 limit the iteration range.
864 """
869 """
865 toprev = None
870 toprev = None
866 attrs = vars(self)
871 attrs = vars(self)
867 if r'_changeid' in attrs:
872 if r'_changeid' in attrs:
868 # We have a cached value already
873 # We have a cached value already
869 toprev = self._changeid
874 toprev = self._changeid
870 elif r'_changectx' in attrs:
875 elif r'_changectx' in attrs:
871 # We know which changelog entry we are coming from
876 # We know which changelog entry we are coming from
872 toprev = self._changectx.rev()
877 toprev = self._changectx.rev()
873
878
874 if toprev is not None:
879 if toprev is not None:
875 return self._adjustlinkrev(toprev, inclusive=True, stoprev=stoprev)
880 return self._adjustlinkrev(toprev, inclusive=True, stoprev=stoprev)
876 elif r'_descendantrev' in attrs:
881 elif r'_descendantrev' in attrs:
877 introrev = self._adjustlinkrev(self._descendantrev, stoprev=stoprev)
882 introrev = self._adjustlinkrev(self._descendantrev, stoprev=stoprev)
878 # be nice and cache the result of the computation
883 # be nice and cache the result of the computation
879 if introrev is not None:
884 if introrev is not None:
880 self._changeid = introrev
885 self._changeid = introrev
881 return introrev
886 return introrev
882 else:
887 else:
883 return self.linkrev()
888 return self.linkrev()
884
889
885 def introfilectx(self):
890 def introfilectx(self):
886 """Return filectx having identical contents, but pointing to the
891 """Return filectx having identical contents, but pointing to the
887 changeset revision where this filectx was introduced"""
892 changeset revision where this filectx was introduced"""
888 introrev = self.introrev()
893 introrev = self.introrev()
889 if self.rev() == introrev:
894 if self.rev() == introrev:
890 return self
895 return self
891 return self.filectx(self.filenode(), changeid=introrev)
896 return self.filectx(self.filenode(), changeid=introrev)
892
897
893 def _parentfilectx(self, path, fileid, filelog):
898 def _parentfilectx(self, path, fileid, filelog):
894 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
899 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
895 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
900 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
896 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
901 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
897 # If self is associated with a changeset (probably explicitly
902 # If self is associated with a changeset (probably explicitly
898 # fed), ensure the created filectx is associated with a
903 # fed), ensure the created filectx is associated with a
899 # changeset that is an ancestor of self.changectx.
904 # changeset that is an ancestor of self.changectx.
900 # This lets us later use _adjustlinkrev to get a correct link.
905 # This lets us later use _adjustlinkrev to get a correct link.
901 fctx._descendantrev = self.rev()
906 fctx._descendantrev = self.rev()
902 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
907 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
903 elif r'_descendantrev' in vars(self):
908 elif r'_descendantrev' in vars(self):
904 # Otherwise propagate _descendantrev if we have one associated.
909 # Otherwise propagate _descendantrev if we have one associated.
905 fctx._descendantrev = self._descendantrev
910 fctx._descendantrev = self._descendantrev
906 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
911 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
907 return fctx
912 return fctx
908
913
909 def parents(self):
914 def parents(self):
910 _path = self._path
915 _path = self._path
911 fl = self._filelog
916 fl = self._filelog
912 parents = self._filelog.parents(self._filenode)
917 parents = self._filelog.parents(self._filenode)
913 pl = [(_path, node, fl) for node in parents if node != nullid]
918 pl = [(_path, node, fl) for node in parents if node != nullid]
914
919
915 r = fl.renamed(self._filenode)
920 r = fl.renamed(self._filenode)
916 if r:
921 if r:
917 # - In the simple rename case, both parents are nullid, so pl is empty.
922 # - In the simple rename case, both parents are nullid, so pl is empty.
918 # - In case of merge, only one of the parents is nullid and should
923 # - In case of merge, only one of the parents is nullid and should
919 # be replaced with the rename information. This parent is -always-
924 # be replaced with the rename information. This parent is -always-
920 # the first one.
925 # the first one.
921 #
926 #
922 # As nullid parents have already been filtered out in the previous list
927 # As nullid parents have already been filtered out in the previous list
923 # comprehension, inserting at index 0 always results in replacing the
928 # comprehension, inserting at index 0 always results in replacing the
924 # first nullid parent with the rename information.
929 # first nullid parent with the rename information.
925 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
930 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
926
931
927 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
932 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
928
933
929 def p1(self):
934 def p1(self):
930 return self.parents()[0]
935 return self.parents()[0]
931
936
932 def p2(self):
937 def p2(self):
933 p = self.parents()
938 p = self.parents()
934 if len(p) == 2:
939 if len(p) == 2:
935 return p[1]
940 return p[1]
936 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
941 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
937
942
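A hedged sketch of how parents() above (including the rename hand-off it performs) is typically consumed: a simple walk over a file's ancestry. The helper name is hypothetical.

def iter_file_ancestry(fctx):
    """Yield fctx and its ancestors, following renames via parents()."""
    seen = set()
    stack = [fctx]
    while stack:
        ctx = stack.pop()
        key = (ctx.path(), ctx.filenode())
        if key in seen:
            continue
        seen.add(key)
        yield ctx
        stack.extend(ctx.parents())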
938 def annotate(self, follow=False, skiprevs=None, diffopts=None):
943 def annotate(self, follow=False, skiprevs=None, diffopts=None):
939 """Returns a list of annotateline objects for each line in the file
944 """Returns a list of annotateline objects for each line in the file
940
945
941 - line.fctx is the filectx of the node where that line was last changed
946 - line.fctx is the filectx of the node where that line was last changed
942 - line.lineno is the line number at the first appearance in the managed
947 - line.lineno is the line number at the first appearance in the managed
943 file
948 file
944 - line.text is the data on that line (including newline character)
949 - line.text is the data on that line (including newline character)
945 """
950 """
946 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
951 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
947
952
948 def parents(f):
953 def parents(f):
949 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
954 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
950 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
955 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
951 # from the topmost introrev (= srcrev) down to p.linkrev() if it
956 # from the topmost introrev (= srcrev) down to p.linkrev() if it
952 # isn't an ancestor of the srcrev.
957 # isn't an ancestor of the srcrev.
953 f._changeid
958 f._changeid
954 pl = f.parents()
959 pl = f.parents()
955
960
956 # Don't return renamed parents if we aren't following.
961 # Don't return renamed parents if we aren't following.
957 if not follow:
962 if not follow:
958 pl = [p for p in pl if p.path() == f.path()]
963 pl = [p for p in pl if p.path() == f.path()]
959
964
960 # renamed filectx won't have a filelog yet, so set it
965 # renamed filectx won't have a filelog yet, so set it
961 # from the cache to save time
966 # from the cache to save time
962 for p in pl:
967 for p in pl:
963 if not r'_filelog' in p.__dict__:
968 if not r'_filelog' in p.__dict__:
964 p._filelog = getlog(p.path())
969 p._filelog = getlog(p.path())
965
970
966 return pl
971 return pl
967
972
968 # use linkrev to find the first changeset where self appeared
973 # use linkrev to find the first changeset where self appeared
969 base = self.introfilectx()
974 base = self.introfilectx()
970 if getattr(base, '_ancestrycontext', None) is None:
975 if getattr(base, '_ancestrycontext', None) is None:
971 cl = self._repo.changelog
976 cl = self._repo.changelog
972 if base.rev() is None:
977 if base.rev() is None:
973 # wctx is not inclusive, but works because _ancestrycontext
978 # wctx is not inclusive, but works because _ancestrycontext
974 # is used to test filelog revisions
979 # is used to test filelog revisions
975 ac = cl.ancestors([p.rev() for p in base.parents()],
980 ac = cl.ancestors([p.rev() for p in base.parents()],
976 inclusive=True)
981 inclusive=True)
977 else:
982 else:
978 ac = cl.ancestors([base.rev()], inclusive=True)
983 ac = cl.ancestors([base.rev()], inclusive=True)
979 base._ancestrycontext = ac
984 base._ancestrycontext = ac
980
985
981 return dagop.annotate(base, parents, skiprevs=skiprevs,
986 return dagop.annotate(base, parents, skiprevs=skiprevs,
982 diffopts=diffopts)
987 diffopts=diffopts)
983
988
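A small usage sketch built on the annotateline fields documented above (line.fctx, line.lineno, line.text). The blame helper and its arguments are assumptions for the example.

def blame(repo, rev, path):
    fctx = repo[rev][path]
    for line in fctx.annotate(follow=True):
        # (revision that last touched the line, original line number, text)
        yield line.fctx.rev(), line.lineno, line.text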
984 def ancestors(self, followfirst=False):
989 def ancestors(self, followfirst=False):
985 visit = {}
990 visit = {}
986 c = self
991 c = self
987 if followfirst:
992 if followfirst:
988 cut = 1
993 cut = 1
989 else:
994 else:
990 cut = None
995 cut = None
991
996
992 while True:
997 while True:
993 for parent in c.parents()[:cut]:
998 for parent in c.parents()[:cut]:
994 visit[(parent.linkrev(), parent.filenode())] = parent
999 visit[(parent.linkrev(), parent.filenode())] = parent
995 if not visit:
1000 if not visit:
996 break
1001 break
997 c = visit.pop(max(visit))
1002 c = visit.pop(max(visit))
998 yield c
1003 yield c
999
1004
1000 def decodeddata(self):
1005 def decodeddata(self):
1001 """Returns `data()` after running repository decoding filters.
1006 """Returns `data()` after running repository decoding filters.
1002
1007
1003 This is often equivalent to how the data would be expressed on disk.
1008 This is often equivalent to how the data would be expressed on disk.
1004 """
1009 """
1005 return self._repo.wwritedata(self.path(), self.data())
1010 return self._repo.wwritedata(self.path(), self.data())
1006
1011
1007 class filectx(basefilectx):
1012 class filectx(basefilectx):
1008 """A filecontext object makes access to data related to a particular
1013 """A filecontext object makes access to data related to a particular
1009 filerevision convenient."""
1014 filerevision convenient."""
1010 def __init__(self, repo, path, changeid=None, fileid=None,
1015 def __init__(self, repo, path, changeid=None, fileid=None,
1011 filelog=None, changectx=None):
1016 filelog=None, changectx=None):
1012 """changeid must be a revision number, if specified.
1017 """changeid must be a revision number, if specified.
1013 fileid can be a file revision or node."""
1018 fileid can be a file revision or node."""
1014 self._repo = repo
1019 self._repo = repo
1015 self._path = path
1020 self._path = path
1016
1021
1017 assert (changeid is not None
1022 assert (changeid is not None
1018 or fileid is not None
1023 or fileid is not None
1019 or changectx is not None), (
1024 or changectx is not None), (
1020 "bad args: changeid=%r, fileid=%r, changectx=%r"
1025 "bad args: changeid=%r, fileid=%r, changectx=%r"
1021 % (changeid, fileid, changectx))
1026 % (changeid, fileid, changectx))
1022
1027
1023 if filelog is not None:
1028 if filelog is not None:
1024 self._filelog = filelog
1029 self._filelog = filelog
1025
1030
1026 if changeid is not None:
1031 if changeid is not None:
1027 self._changeid = changeid
1032 self._changeid = changeid
1028 if changectx is not None:
1033 if changectx is not None:
1029 self._changectx = changectx
1034 self._changectx = changectx
1030 if fileid is not None:
1035 if fileid is not None:
1031 self._fileid = fileid
1036 self._fileid = fileid
1032
1037
1033 @propertycache
1038 @propertycache
1034 def _changectx(self):
1039 def _changectx(self):
1035 try:
1040 try:
1036 return self._repo[self._changeid]
1041 return self._repo[self._changeid]
1037 except error.FilteredRepoLookupError:
1042 except error.FilteredRepoLookupError:
1038 # Linkrev may point to any revision in the repository. When the
1043 # Linkrev may point to any revision in the repository. When the
1039 # repository is filtered, this may lead to `filectx` trying to build
1044 # repository is filtered, this may lead to `filectx` trying to build
1040 # a `changectx` for a filtered revision. In such a case we fall back to
1045 # a `changectx` for a filtered revision. In such a case we fall back to
1041 # creating the `changectx` on the unfiltered version of the repository.
1046 # creating the `changectx` on the unfiltered version of the repository.
1042 # This fallback should not be an issue because `changectx` objects from
1047 # This fallback should not be an issue because `changectx` objects from
1043 # `filectx` are not used in complex operations that care about
1048 # `filectx` are not used in complex operations that care about
1044 # filtering.
1049 # filtering.
1045 #
1050 #
1046 # This fallback is a cheap and dirty fix that prevents several
1051 # This fallback is a cheap and dirty fix that prevents several
1047 # crashes. It does not ensure the behavior is correct. However the
1052 # crashes. It does not ensure the behavior is correct. However the
1048 # behavior was not correct before filtering either, and "incorrect
1053 # behavior was not correct before filtering either, and "incorrect
1049 # behavior" is seen as better than a "crash".
1054 # behavior" is seen as better than a "crash".
1050 #
1055 #
1051 # Linkrevs have several serious problems with filtering that are
1056 # Linkrevs have several serious problems with filtering that are
1052 # complicated to solve. Proper handling of the issue here should be
1057 # complicated to solve. Proper handling of the issue here should be
1053 # considered when solving the linkrev issues is on the table.
1058 # considered when solving the linkrev issues is on the table.
1054 return self._repo.unfiltered()[self._changeid]
1059 return self._repo.unfiltered()[self._changeid]
1055
1060
1056 def filectx(self, fileid, changeid=None):
1061 def filectx(self, fileid, changeid=None):
1057 '''opens an arbitrary revision of the file without
1062 '''opens an arbitrary revision of the file without
1058 opening a new filelog'''
1063 opening a new filelog'''
1059 return filectx(self._repo, self._path, fileid=fileid,
1064 return filectx(self._repo, self._path, fileid=fileid,
1060 filelog=self._filelog, changeid=changeid)
1065 filelog=self._filelog, changeid=changeid)
1061
1066
1062 def rawdata(self):
1067 def rawdata(self):
1063 return self._filelog.rawdata(self._filenode)
1068 return self._filelog.rawdata(self._filenode)
1064
1069
1065 def rawflags(self):
1070 def rawflags(self):
1066 """low-level revlog flags"""
1071 """low-level revlog flags"""
1067 return self._filelog.flags(self._filerev)
1072 return self._filelog.flags(self._filerev)
1068
1073
1069 def data(self):
1074 def data(self):
1070 try:
1075 try:
1071 return self._filelog.read(self._filenode)
1076 return self._filelog.read(self._filenode)
1072 except error.CensoredNodeError:
1077 except error.CensoredNodeError:
1073 if self._repo.ui.config("censor", "policy") == "ignore":
1078 if self._repo.ui.config("censor", "policy") == "ignore":
1074 return ""
1079 return ""
1075 raise error.Abort(_("censored node: %s") % short(self._filenode),
1080 raise error.Abort(_("censored node: %s") % short(self._filenode),
1076 hint=_("set censor.policy to ignore errors"))
1081 hint=_("set censor.policy to ignore errors"))
1077
1082
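Sketch of consuming data() in the presence of censored revisions, per the censor.policy handling above: when the policy is "ignore", data() already returns an empty string, otherwise it raises Abort. The wrapper name is hypothetical.

from mercurial import error   # within this module, the relative import is used

def read_data_or_empty(fctx):
    try:
        return fctx.data()
    except error.Abort:
        # censored revision and censor.policy is not set to "ignore"
        return ''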
1078 def size(self):
1083 def size(self):
1079 return self._filelog.size(self._filerev)
1084 return self._filelog.size(self._filerev)
1080
1085
1081 @propertycache
1086 @propertycache
1082 def _copied(self):
1087 def _copied(self):
1083 """check if file was actually renamed in this changeset revision
1088 """check if file was actually renamed in this changeset revision
1084
1089
1085 If a rename is logged in the file revision, we report the copy for the
1090 If a rename is logged in the file revision, we report the copy for the
1086 changeset only if the file revision's linkrev points back to the changeset
1091 changeset only if the file revision's linkrev points back to the changeset
1087 in question or both changeset parents contain different file revisions.
1092 in question or both changeset parents contain different file revisions.
1088 """
1093 """
1089
1094
1090 renamed = self._filelog.renamed(self._filenode)
1095 renamed = self._filelog.renamed(self._filenode)
1091 if not renamed:
1096 if not renamed:
1092 return None
1097 return None
1093
1098
1094 if self.rev() == self.linkrev():
1099 if self.rev() == self.linkrev():
1095 return renamed
1100 return renamed
1096
1101
1097 name = self.path()
1102 name = self.path()
1098 fnode = self._filenode
1103 fnode = self._filenode
1099 for p in self._changectx.parents():
1104 for p in self._changectx.parents():
1100 try:
1105 try:
1101 if fnode == p.filenode(name):
1106 if fnode == p.filenode(name):
1102 return None
1107 return None
1103 except error.LookupError:
1108 except error.LookupError:
1104 pass
1109 pass
1105 return renamed
1110 return renamed
1106
1111
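The rule implemented by _copied above, restated as a standalone predicate for clarity; all parameter names are hypothetical and this function is not part of the class.

def reports_copy(renamed, linkrev_matches, parent_filenodes, fnode):
    # renamed: result of filelog.renamed(); linkrev_matches: whether the
    # linkrev points back at this changeset; parent_filenodes: nodes of this
    # path in the changeset parents (missing entries omitted)
    if not renamed:
        return None
    if linkrev_matches:
        return renamed
    if any(fnode == pfn for pfn in parent_filenodes):
        # a parent already carries this exact file revision, so the copy
        # is not reported for this changeset
        return None
    return renamed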
1107 def children(self):
1112 def children(self):
1108 # hard for renames
1113 # hard for renames
1109 c = self._filelog.children(self._filenode)
1114 c = self._filelog.children(self._filenode)
1110 return [filectx(self._repo, self._path, fileid=x,
1115 return [filectx(self._repo, self._path, fileid=x,
1111 filelog=self._filelog) for x in c]
1116 filelog=self._filelog) for x in c]
1112
1117
1113 class committablectx(basectx):
1118 class committablectx(basectx):
1114 """A committablectx object provides common functionality for a context that
1119 """A committablectx object provides common functionality for a context that
1115 wants the ability to commit, e.g. workingctx or memctx."""
1120 wants the ability to commit, e.g. workingctx or memctx."""
1116 def __init__(self, repo, text="", user=None, date=None, extra=None,
1121 def __init__(self, repo, text="", user=None, date=None, extra=None,
1117 changes=None, branch=None):
1122 changes=None, branch=None):
1118 super(committablectx, self).__init__(repo)
1123 super(committablectx, self).__init__(repo)
1119 self._rev = None
1124 self._rev = None
1120 self._node = None
1125 self._node = None
1121 self._text = text
1126 self._text = text
1122 if date:
1127 if date:
1123 self._date = dateutil.parsedate(date)
1128 self._date = dateutil.parsedate(date)
1124 if user:
1129 if user:
1125 self._user = user
1130 self._user = user
1126 if changes:
1131 if changes:
1127 self._status = changes
1132 self._status = changes
1128
1133
1129 self._extra = {}
1134 self._extra = {}
1130 if extra:
1135 if extra:
1131 self._extra = extra.copy()
1136 self._extra = extra.copy()
1132 if branch is not None:
1137 if branch is not None:
1133 self._extra['branch'] = encoding.fromlocal(branch)
1138 self._extra['branch'] = encoding.fromlocal(branch)
1134 if not self._extra.get('branch'):
1139 if not self._extra.get('branch'):
1135 self._extra['branch'] = 'default'
1140 self._extra['branch'] = 'default'
1136
1141
1137 def __bytes__(self):
1142 def __bytes__(self):
1138 return bytes(self._parents[0]) + "+"
1143 return bytes(self._parents[0]) + "+"
1139
1144
1140 __str__ = encoding.strmethod(__bytes__)
1145 __str__ = encoding.strmethod(__bytes__)
1141
1146
1142 def __nonzero__(self):
1147 def __nonzero__(self):
1143 return True
1148 return True
1144
1149
1145 __bool__ = __nonzero__
1150 __bool__ = __nonzero__
1146
1151
1147 @propertycache
1152 @propertycache
1148 def _status(self):
1153 def _status(self):
1149 return self._repo.status()
1154 return self._repo.status()
1150
1155
1151 @propertycache
1156 @propertycache
1152 def _user(self):
1157 def _user(self):
1153 return self._repo.ui.username()
1158 return self._repo.ui.username()
1154
1159
1155 @propertycache
1160 @propertycache
1156 def _date(self):
1161 def _date(self):
1157 ui = self._repo.ui
1162 ui = self._repo.ui
1158 date = ui.configdate('devel', 'default-date')
1163 date = ui.configdate('devel', 'default-date')
1159 if date is None:
1164 if date is None:
1160 date = dateutil.makedate()
1165 date = dateutil.makedate()
1161 return date
1166 return date
1162
1167
1163 def subrev(self, subpath):
1168 def subrev(self, subpath):
1164 return None
1169 return None
1165
1170
1166 def manifestnode(self):
1171 def manifestnode(self):
1167 return None
1172 return None
1168 def user(self):
1173 def user(self):
1169 return self._user or self._repo.ui.username()
1174 return self._user or self._repo.ui.username()
1170 def date(self):
1175 def date(self):
1171 return self._date
1176 return self._date
1172 def description(self):
1177 def description(self):
1173 return self._text
1178 return self._text
1174 def files(self):
1179 def files(self):
1175 return sorted(self._status.modified + self._status.added +
1180 return sorted(self._status.modified + self._status.added +
1176 self._status.removed)
1181 self._status.removed)
1177 def modified(self):
1182 def modified(self):
1178 return self._status.modified
1183 return self._status.modified
1179 def added(self):
1184 def added(self):
1180 return self._status.added
1185 return self._status.added
1181 def removed(self):
1186 def removed(self):
1182 return self._status.removed
1187 return self._status.removed
1183 def deleted(self):
1188 def deleted(self):
1184 return self._status.deleted
1189 return self._status.deleted
1185 filesmodified = modified
1190 filesmodified = modified
1186 filesadded = added
1191 filesadded = added
1187 filesremoved = removed
1192 filesremoved = removed
1188
1193
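For a committable context the files*() accessors above are plain aliases of the status lists, so, for instance (usage sketch, working context obtained through repo[None]):

wctx = repo[None]                       # workingctx
assert wctx.filesremoved() == wctx.removed()
assert wctx.filesadded() == wctx.added()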
1189 def branch(self):
1194 def branch(self):
1190 return encoding.tolocal(self._extra['branch'])
1195 return encoding.tolocal(self._extra['branch'])
1191 def closesbranch(self):
1196 def closesbranch(self):
1192 return 'close' in self._extra
1197 return 'close' in self._extra
1193 def extra(self):
1198 def extra(self):
1194 return self._extra
1199 return self._extra
1195
1200
1196 def isinmemory(self):
1201 def isinmemory(self):
1197 return False
1202 return False
1198
1203
1199 def tags(self):
1204 def tags(self):
1200 return []
1205 return []
1201
1206
1202 def bookmarks(self):
1207 def bookmarks(self):
1203 b = []
1208 b = []
1204 for p in self.parents():
1209 for p in self.parents():
1205 b.extend(p.bookmarks())
1210 b.extend(p.bookmarks())
1206 return b
1211 return b
1207
1212
1208 def phase(self):
1213 def phase(self):
1209 phase = phases.draft # default phase to draft
1214 phase = phases.draft # default phase to draft
1210 for p in self.parents():
1215 for p in self.parents():
1211 phase = max(phase, p.phase())
1216 phase = max(phase, p.phase())
1212 return phase
1217 return phase
1213
1218
1214 def hidden(self):
1219 def hidden(self):
1215 return False
1220 return False
1216
1221
1217 def children(self):
1222 def children(self):
1218 return []
1223 return []
1219
1224
1220 def ancestor(self, c2):
1225 def ancestor(self, c2):
1221 """return the "best" ancestor context of self and c2"""
1226 """return the "best" ancestor context of self and c2"""
1222 return self._parents[0].ancestor(c2) # punt on two parents for now
1227 return self._parents[0].ancestor(c2) # punt on two parents for now
1223
1228
1224 def ancestors(self):
1229 def ancestors(self):
1225 for p in self._parents:
1230 for p in self._parents:
1226 yield p
1231 yield p
1227 for a in self._repo.changelog.ancestors(
1232 for a in self._repo.changelog.ancestors(
1228 [p.rev() for p in self._parents]):
1233 [p.rev() for p in self._parents]):
1229 yield self._repo[a]
1234 yield self._repo[a]
1230
1235
1231 def markcommitted(self, node):
1236 def markcommitted(self, node):
1232 """Perform post-commit cleanup necessary after committing this ctx
1237 """Perform post-commit cleanup necessary after committing this ctx
1233
1238
1234 Specifically, this updates backing stores this working context
1239 Specifically, this updates backing stores this working context
1235 wraps to reflect the fact that the changes reflected by this
1240 wraps to reflect the fact that the changes reflected by this
1236 workingctx have been committed. For example, it marks
1241 workingctx have been committed. For example, it marks
1237 modified and added files as normal in the dirstate.
1242 modified and added files as normal in the dirstate.
1238
1243
1239 """
1244 """
1240
1245
1241 def dirty(self, missing=False, merge=True, branch=True):
1246 def dirty(self, missing=False, merge=True, branch=True):
1242 return False
1247 return False
1243
1248
1244 class workingctx(committablectx):
1249 class workingctx(committablectx):
1245 """A workingctx object makes access to data related to
1250 """A workingctx object makes access to data related to
1246 the current working directory convenient.
1251 the current working directory convenient.
1247 date - any valid date string or (unixtime, offset), or None.
1252 date - any valid date string or (unixtime, offset), or None.
1248 user - username string, or None.
1253 user - username string, or None.
1249 extra - a dictionary of extra values, or None.
1254 extra - a dictionary of extra values, or None.
1250 changes - a list of file lists as returned by localrepo.status()
1255 changes - a list of file lists as returned by localrepo.status()
1251 or None to use the repository status.
1256 or None to use the repository status.
1252 """
1257 """
1253 def __init__(self, repo, text="", user=None, date=None, extra=None,
1258 def __init__(self, repo, text="", user=None, date=None, extra=None,
1254 changes=None):
1259 changes=None):
1255 branch = None
1260 branch = None
1256 if not extra or 'branch' not in extra:
1261 if not extra or 'branch' not in extra:
1257 try:
1262 try:
1258 branch = repo.dirstate.branch()
1263 branch = repo.dirstate.branch()
1259 except UnicodeDecodeError:
1264 except UnicodeDecodeError:
1260 raise error.Abort(_('branch name not in UTF-8!'))
1265 raise error.Abort(_('branch name not in UTF-8!'))
1261 super(workingctx, self).__init__(repo, text, user, date, extra, changes,
1266 super(workingctx, self).__init__(repo, text, user, date, extra, changes,
1262 branch=branch)
1267 branch=branch)
1263
1268
1264 def __iter__(self):
1269 def __iter__(self):
1265 d = self._repo.dirstate
1270 d = self._repo.dirstate
1266 for f in d:
1271 for f in d:
1267 if d[f] != 'r':
1272 if d[f] != 'r':
1268 yield f
1273 yield f
1269
1274
1270 def __contains__(self, key):
1275 def __contains__(self, key):
1271 return self._repo.dirstate[key] not in "?r"
1276 return self._repo.dirstate[key] not in "?r"
1272
1277
1273 def hex(self):
1278 def hex(self):
1274 return wdirhex
1279 return wdirhex
1275
1280
1276 @propertycache
1281 @propertycache
1277 def _parents(self):
1282 def _parents(self):
1278 p = self._repo.dirstate.parents()
1283 p = self._repo.dirstate.parents()
1279 if p[1] == nullid:
1284 if p[1] == nullid:
1280 p = p[:-1]
1285 p = p[:-1]
1281 # use unfiltered repo to delay/avoid loading obsmarkers
1286 # use unfiltered repo to delay/avoid loading obsmarkers
1282 unfi = self._repo.unfiltered()
1287 unfi = self._repo.unfiltered()
1283 return [changectx(self._repo, unfi.changelog.rev(n), n) for n in p]
1288 return [changectx(self._repo, unfi.changelog.rev(n), n) for n in p]
1284
1289
1285 def _fileinfo(self, path):
1290 def _fileinfo(self, path):
1286 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1291 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1287 self._manifest
1292 self._manifest
1288 return super(workingctx, self)._fileinfo(path)
1293 return super(workingctx, self)._fileinfo(path)
1289
1294
1290 def _buildflagfunc(self):
1295 def _buildflagfunc(self):
1291 # Create a fallback function for getting file flags when the
1296 # Create a fallback function for getting file flags when the
1292 # filesystem doesn't support them
1297 # filesystem doesn't support them
1293
1298
1294 copiesget = self._repo.dirstate.copies().get
1299 copiesget = self._repo.dirstate.copies().get
1295 parents = self.parents()
1300 parents = self.parents()
1296 if len(parents) < 2:
1301 if len(parents) < 2:
1297 # when we have one parent, it's easy: copy from parent
1302 # when we have one parent, it's easy: copy from parent
1298 man = parents[0].manifest()
1303 man = parents[0].manifest()
1299 def func(f):
1304 def func(f):
1300 f = copiesget(f, f)
1305 f = copiesget(f, f)
1301 return man.flags(f)
1306 return man.flags(f)
1302 else:
1307 else:
1303 # merges are tricky: we try to reconstruct the unstored
1308 # merges are tricky: we try to reconstruct the unstored
1304 # result from the merge (issue1802)
1309 # result from the merge (issue1802)
1305 p1, p2 = parents
1310 p1, p2 = parents
1306 pa = p1.ancestor(p2)
1311 pa = p1.ancestor(p2)
1307 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1312 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1308
1313
1309 def func(f):
1314 def func(f):
1310 f = copiesget(f, f) # may be wrong for merges with copies
1315 f = copiesget(f, f) # may be wrong for merges with copies
1311 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1316 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1312 if fl1 == fl2:
1317 if fl1 == fl2:
1313 return fl1
1318 return fl1
1314 if fl1 == fla:
1319 if fl1 == fla:
1315 return fl2
1320 return fl2
1316 if fl2 == fla:
1321 if fl2 == fla:
1317 return fl1
1322 return fl1
1318 return '' # punt for conflicts
1323 return '' # punt for conflicts
1319
1324
1320 return func
1325 return func
1321
1326
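Worked examples of the three-way flag resolution performed by func() above (illustrative values only):

#   fl1 == fl2               -> keep it:        ('x', 'x', '')  => 'x'
#   fl1 == fla, fl2 differs  -> p2 changed it:  ('',  'l', '')  => 'l'
#   fl2 == fla, fl1 differs  -> p1 changed it:  ('x', '',  '')  => 'x'
#   all three differ         -> conflict, punt: ('x', 'l', '')  => ''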
1322 @propertycache
1327 @propertycache
1323 def _flagfunc(self):
1328 def _flagfunc(self):
1324 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1329 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1325
1330
1326 def flags(self, path):
1331 def flags(self, path):
1327 if r'_manifest' in self.__dict__:
1332 if r'_manifest' in self.__dict__:
1328 try:
1333 try:
1329 return self._manifest.flags(path)
1334 return self._manifest.flags(path)
1330 except KeyError:
1335 except KeyError:
1331 return ''
1336 return ''
1332
1337
1333 try:
1338 try:
1334 return self._flagfunc(path)
1339 return self._flagfunc(path)
1335 except OSError:
1340 except OSError:
1336 return ''
1341 return ''
1337
1342
1338 def filectx(self, path, filelog=None):
1343 def filectx(self, path, filelog=None):
1339 """get a file context from the working directory"""
1344 """get a file context from the working directory"""
1340 return workingfilectx(self._repo, path, workingctx=self,
1345 return workingfilectx(self._repo, path, workingctx=self,
1341 filelog=filelog)
1346 filelog=filelog)
1342
1347
1343 def dirty(self, missing=False, merge=True, branch=True):
1348 def dirty(self, missing=False, merge=True, branch=True):
1344 "check whether a working directory is modified"
1349 "check whether a working directory is modified"
1345 # check subrepos first
1350 # check subrepos first
1346 for s in sorted(self.substate):
1351 for s in sorted(self.substate):
1347 if self.sub(s).dirty(missing=missing):
1352 if self.sub(s).dirty(missing=missing):
1348 return True
1353 return True
1349 # check current working dir
1354 # check current working dir
1350 return ((merge and self.p2()) or
1355 return ((merge and self.p2()) or
1351 (branch and self.branch() != self.p1().branch()) or
1356 (branch and self.branch() != self.p1().branch()) or
1352 self.modified() or self.added() or self.removed() or
1357 self.modified() or self.added() or self.removed() or
1353 (missing and self.deleted()))
1358 (missing and self.deleted()))
1354
1359
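Usage sketch for dirty(): the common "bail out on uncommitted changes" pattern. The helper name is an assumption; repo[None] yields the working context.

from mercurial import error
from mercurial.i18n import _

def bail_if_changed(repo):
    if repo[None].dirty(missing=True):
        raise error.Abort(_('uncommitted changes'))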
1355 def add(self, list, prefix=""):
1360 def add(self, list, prefix=""):
1356 with self._repo.wlock():
1361 with self._repo.wlock():
1357 ui, ds = self._repo.ui, self._repo.dirstate
1362 ui, ds = self._repo.ui, self._repo.dirstate
1358 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1363 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1359 rejected = []
1364 rejected = []
1360 lstat = self._repo.wvfs.lstat
1365 lstat = self._repo.wvfs.lstat
1361 for f in list:
1366 for f in list:
1362 # ds.pathto() returns an absolute path when this is invoked from
1367 # ds.pathto() returns an absolute path when this is invoked from
1363 # the keyword extension. That gets flagged as non-portable on
1368 # the keyword extension. That gets flagged as non-portable on
1364 # Windows, since it contains the drive letter and colon.
1369 # Windows, since it contains the drive letter and colon.
1365 scmutil.checkportable(ui, os.path.join(prefix, f))
1370 scmutil.checkportable(ui, os.path.join(prefix, f))
1366 try:
1371 try:
1367 st = lstat(f)
1372 st = lstat(f)
1368 except OSError:
1373 except OSError:
1369 ui.warn(_("%s does not exist!\n") % uipath(f))
1374 ui.warn(_("%s does not exist!\n") % uipath(f))
1370 rejected.append(f)
1375 rejected.append(f)
1371 continue
1376 continue
1372 limit = ui.configbytes('ui', 'large-file-limit')
1377 limit = ui.configbytes('ui', 'large-file-limit')
1373 if limit != 0 and st.st_size > limit:
1378 if limit != 0 and st.st_size > limit:
1374 ui.warn(_("%s: up to %d MB of RAM may be required "
1379 ui.warn(_("%s: up to %d MB of RAM may be required "
1375 "to manage this file\n"
1380 "to manage this file\n"
1376 "(use 'hg revert %s' to cancel the "
1381 "(use 'hg revert %s' to cancel the "
1377 "pending addition)\n")
1382 "pending addition)\n")
1378 % (f, 3 * st.st_size // 1000000, uipath(f)))
1383 % (f, 3 * st.st_size // 1000000, uipath(f)))
1379 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1384 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1380 ui.warn(_("%s not added: only files and symlinks "
1385 ui.warn(_("%s not added: only files and symlinks "
1381 "supported currently\n") % uipath(f))
1386 "supported currently\n") % uipath(f))
1382 rejected.append(f)
1387 rejected.append(f)
1383 elif ds[f] in 'amn':
1388 elif ds[f] in 'amn':
1384 ui.warn(_("%s already tracked!\n") % uipath(f))
1389 ui.warn(_("%s already tracked!\n") % uipath(f))
1385 elif ds[f] == 'r':
1390 elif ds[f] == 'r':
1386 ds.normallookup(f)
1391 ds.normallookup(f)
1387 else:
1392 else:
1388 ds.add(f)
1393 ds.add(f)
1389 return rejected
1394 return rejected
1390
1395
1391 def forget(self, files, prefix=""):
1396 def forget(self, files, prefix=""):
1392 with self._repo.wlock():
1397 with self._repo.wlock():
1393 ds = self._repo.dirstate
1398 ds = self._repo.dirstate
1394 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1399 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1395 rejected = []
1400 rejected = []
1396 for f in files:
1401 for f in files:
1397 if f not in ds:
1402 if f not in ds:
1398 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1403 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1399 rejected.append(f)
1404 rejected.append(f)
1400 elif ds[f] != 'a':
1405 elif ds[f] != 'a':
1401 ds.remove(f)
1406 ds.remove(f)
1402 else:
1407 else:
1403 ds.drop(f)
1408 ds.drop(f)
1404 return rejected
1409 return rejected
1405
1410
1406 def copy(self, source, dest):
1411 def copy(self, source, dest):
1407 try:
1412 try:
1408 st = self._repo.wvfs.lstat(dest)
1413 st = self._repo.wvfs.lstat(dest)
1409 except OSError as err:
1414 except OSError as err:
1410 if err.errno != errno.ENOENT:
1415 if err.errno != errno.ENOENT:
1411 raise
1416 raise
1412 self._repo.ui.warn(_("%s does not exist!\n")
1417 self._repo.ui.warn(_("%s does not exist!\n")
1413 % self._repo.dirstate.pathto(dest))
1418 % self._repo.dirstate.pathto(dest))
1414 return
1419 return
1415 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1420 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1416 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1421 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1417 "symbolic link\n")
1422 "symbolic link\n")
1418 % self._repo.dirstate.pathto(dest))
1423 % self._repo.dirstate.pathto(dest))
1419 else:
1424 else:
1420 with self._repo.wlock():
1425 with self._repo.wlock():
1421 ds = self._repo.dirstate
1426 ds = self._repo.dirstate
1422 if ds[dest] in '?':
1427 if ds[dest] in '?':
1423 ds.add(dest)
1428 ds.add(dest)
1424 elif ds[dest] in 'r':
1429 elif ds[dest] in 'r':
1425 ds.normallookup(dest)
1430 ds.normallookup(dest)
1426 ds.copy(source, dest)
1431 ds.copy(source, dest)
1427
1432
1428 def match(self, pats=None, include=None, exclude=None, default='glob',
1433 def match(self, pats=None, include=None, exclude=None, default='glob',
1429 listsubrepos=False, badfn=None):
1434 listsubrepos=False, badfn=None):
1430 r = self._repo
1435 r = self._repo
1431
1436
1432 # Only a case insensitive filesystem needs magic to translate user input
1437 # Only a case insensitive filesystem needs magic to translate user input
1433 # to actual case in the filesystem.
1438 # to actual case in the filesystem.
1434 icasefs = not util.fscasesensitive(r.root)
1439 icasefs = not util.fscasesensitive(r.root)
1435 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1440 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1436 default, auditor=r.auditor, ctx=self,
1441 default, auditor=r.auditor, ctx=self,
1437 listsubrepos=listsubrepos, badfn=badfn,
1442 listsubrepos=listsubrepos, badfn=badfn,
1438 icasefs=icasefs)
1443 icasefs=icasefs)
1439
1444
1440 def _filtersuspectsymlink(self, files):
1445 def _filtersuspectsymlink(self, files):
1441 if not files or self._repo.dirstate._checklink:
1446 if not files or self._repo.dirstate._checklink:
1442 return files
1447 return files
1443
1448
1444 # Symlink placeholders may get non-symlink-like contents
1449 # Symlink placeholders may get non-symlink-like contents
1445 # via user error or dereferencing by NFS or Samba servers,
1450 # via user error or dereferencing by NFS or Samba servers,
1446 # so we filter out any placeholders that don't look like a
1451 # so we filter out any placeholders that don't look like a
1447 # symlink
1452 # symlink
1448 sane = []
1453 sane = []
1449 for f in files:
1454 for f in files:
1450 if self.flags(f) == 'l':
1455 if self.flags(f) == 'l':
1451 d = self[f].data()
1456 d = self[f].data()
1452 if (d == '' or len(d) >= 1024 or '\n' in d
1457 if (d == '' or len(d) >= 1024 or '\n' in d
1453 or stringutil.binary(d)):
1458 or stringutil.binary(d)):
1454 self._repo.ui.debug('ignoring suspect symlink placeholder'
1459 self._repo.ui.debug('ignoring suspect symlink placeholder'
1455 ' "%s"\n' % f)
1460 ' "%s"\n' % f)
1456 continue
1461 continue
1457 sane.append(f)
1462 sane.append(f)
1458 return sane
1463 return sane
1459
1464
1460 def _checklookup(self, files):
1465 def _checklookup(self, files):
1461 # check for any possibly clean files
1466 # check for any possibly clean files
1462 if not files:
1467 if not files:
1463 return [], [], []
1468 return [], [], []
1464
1469
1465 modified = []
1470 modified = []
1466 deleted = []
1471 deleted = []
1467 fixup = []
1472 fixup = []
1468 pctx = self._parents[0]
1473 pctx = self._parents[0]
1469 # do a full compare of any files that might have changed
1474 # do a full compare of any files that might have changed
1470 for f in sorted(files):
1475 for f in sorted(files):
1471 try:
1476 try:
1472 # This will return True for a file that got replaced by a
1477 # This will return True for a file that got replaced by a
1473 # directory in the interim, but fixing that is pretty hard.
1478 # directory in the interim, but fixing that is pretty hard.
1474 if (f not in pctx or self.flags(f) != pctx.flags(f)
1479 if (f not in pctx or self.flags(f) != pctx.flags(f)
1475 or pctx[f].cmp(self[f])):
1480 or pctx[f].cmp(self[f])):
1476 modified.append(f)
1481 modified.append(f)
1477 else:
1482 else:
1478 fixup.append(f)
1483 fixup.append(f)
1479 except (IOError, OSError):
1484 except (IOError, OSError):
1480 # A file became inaccessible in between? Mark it as deleted,
1485 # A file became inaccessible in between? Mark it as deleted,
1481 # matching dirstate behavior (issue5584).
1486 # matching dirstate behavior (issue5584).
1482 # The dirstate has more complex behavior around whether a
1487 # The dirstate has more complex behavior around whether a
1483 # missing file matches a directory, etc, but we don't need to
1488 # missing file matches a directory, etc, but we don't need to
1484 # bother with that: if f has made it to this point, we're sure
1489 # bother with that: if f has made it to this point, we're sure
1485 # it's in the dirstate.
1490 # it's in the dirstate.
1486 deleted.append(f)
1491 deleted.append(f)
1487
1492
1488 return modified, deleted, fixup
1493 return modified, deleted, fixup
1489
1494
1490 def _poststatusfixup(self, status, fixup):
1495 def _poststatusfixup(self, status, fixup):
1491 """update dirstate for files that are actually clean"""
1496 """update dirstate for files that are actually clean"""
1492 poststatus = self._repo.postdsstatus()
1497 poststatus = self._repo.postdsstatus()
1493 if fixup or poststatus:
1498 if fixup or poststatus:
1494 try:
1499 try:
1495 oldid = self._repo.dirstate.identity()
1500 oldid = self._repo.dirstate.identity()
1496
1501
1497 # updating the dirstate is optional
1502 # updating the dirstate is optional
1498 # so we don't wait on the lock
1503 # so we don't wait on the lock
1499 # wlock can invalidate the dirstate, so cache normal _after_
1504 # wlock can invalidate the dirstate, so cache normal _after_
1500 # taking the lock
1505 # taking the lock
1501 with self._repo.wlock(False):
1506 with self._repo.wlock(False):
1502 if self._repo.dirstate.identity() == oldid:
1507 if self._repo.dirstate.identity() == oldid:
1503 if fixup:
1508 if fixup:
1504 normal = self._repo.dirstate.normal
1509 normal = self._repo.dirstate.normal
1505 for f in fixup:
1510 for f in fixup:
1506 normal(f)
1511 normal(f)
1507 # write changes out explicitly, because nesting
1512 # write changes out explicitly, because nesting
1508 # wlock at runtime may prevent 'wlock.release()'
1513 # wlock at runtime may prevent 'wlock.release()'
1509 # after this block from doing so for subsequent
1514 # after this block from doing so for subsequent
1510 # changing files
1515 # changing files
1511 tr = self._repo.currenttransaction()
1516 tr = self._repo.currenttransaction()
1512 self._repo.dirstate.write(tr)
1517 self._repo.dirstate.write(tr)
1513
1518
1514 if poststatus:
1519 if poststatus:
1515 for ps in poststatus:
1520 for ps in poststatus:
1516 ps(self, status)
1521 ps(self, status)
1517 else:
1522 else:
1518 # in this case, writing changes out breaks
1523 # in this case, writing changes out breaks
1519 # consistency, because .hg/dirstate was
1524 # consistency, because .hg/dirstate was
1520 # already changed simultaneously after the last
1525 # already changed simultaneously after the last
1521 # caching (see also issue5584 for details)
1526 # caching (see also issue5584 for details)
1522 self._repo.ui.debug('skip updating dirstate: '
1527 self._repo.ui.debug('skip updating dirstate: '
1523 'identity mismatch\n')
1528 'identity mismatch\n')
1524 except error.LockError:
1529 except error.LockError:
1525 pass
1530 pass
1526 finally:
1531 finally:
1527 # Even if the wlock couldn't be grabbed, clear out the list.
1532 # Even if the wlock couldn't be grabbed, clear out the list.
1528 self._repo.clearpostdsstatus()
1533 self._repo.clearpostdsstatus()
1529
1534
1530 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1535 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1531 '''Gets the status from the dirstate -- internal use only.'''
1536 '''Gets the status from the dirstate -- internal use only.'''
1532 subrepos = []
1537 subrepos = []
1533 if '.hgsub' in self:
1538 if '.hgsub' in self:
1534 subrepos = sorted(self.substate)
1539 subrepos = sorted(self.substate)
1535 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1540 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1536 clean=clean, unknown=unknown)
1541 clean=clean, unknown=unknown)
1537
1542
1538 # check for any possibly clean files
1543 # check for any possibly clean files
1539 fixup = []
1544 fixup = []
1540 if cmp:
1545 if cmp:
1541 modified2, deleted2, fixup = self._checklookup(cmp)
1546 modified2, deleted2, fixup = self._checklookup(cmp)
1542 s.modified.extend(modified2)
1547 s.modified.extend(modified2)
1543 s.deleted.extend(deleted2)
1548 s.deleted.extend(deleted2)
1544
1549
1545 if fixup and clean:
1550 if fixup and clean:
1546 s.clean.extend(fixup)
1551 s.clean.extend(fixup)
1547
1552
1548 self._poststatusfixup(s, fixup)
1553 self._poststatusfixup(s, fixup)
1549
1554
1550 if match.always():
1555 if match.always():
1551 # cache for performance
1556 # cache for performance
1552 if s.unknown or s.ignored or s.clean:
1557 if s.unknown or s.ignored or s.clean:
1553 # "_status" is cached with list*=False in the normal route
1558 # "_status" is cached with list*=False in the normal route
1554 self._status = scmutil.status(s.modified, s.added, s.removed,
1559 self._status = scmutil.status(s.modified, s.added, s.removed,
1555 s.deleted, [], [], [])
1560 s.deleted, [], [], [])
1556 else:
1561 else:
1557 self._status = s
1562 self._status = s
1558
1563
1559 return s
1564 return s
1560
1565
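The dirstate status computed above ultimately backs the public status API; a usage sketch (the repo variable is assumed):

st = repo.status(ignored=False, clean=False, unknown=True)
for f in st.unknown:
    repo.ui.write('? %s\n' % f)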
1561 @propertycache
1566 @propertycache
1562 def _copies(self):
1567 def _copies(self):
1563 p1copies = {}
1568 p1copies = {}
1564 p2copies = {}
1569 p2copies = {}
1565 parents = self._repo.dirstate.parents()
1570 parents = self._repo.dirstate.parents()
1566 p1manifest = self._repo[parents[0]].manifest()
1571 p1manifest = self._repo[parents[0]].manifest()
1567 p2manifest = self._repo[parents[1]].manifest()
1572 p2manifest = self._repo[parents[1]].manifest()
1568 changedset = set(self.added()) | set(self.modified())
1573 changedset = set(self.added()) | set(self.modified())
1569 narrowmatch = self._repo.narrowmatch()
1574 narrowmatch = self._repo.narrowmatch()
1570 for dst, src in self._repo.dirstate.copies().items():
1575 for dst, src in self._repo.dirstate.copies().items():
1571 if dst not in changedset or not narrowmatch(dst):
1576 if dst not in changedset or not narrowmatch(dst):
1572 continue
1577 continue
1573 if src in p1manifest:
1578 if src in p1manifest:
1574 p1copies[dst] = src
1579 p1copies[dst] = src
1575 elif src in p2manifest:
1580 elif src in p2manifest:
1576 p2copies[dst] = src
1581 p2copies[dst] = src
1577 return p1copies, p2copies
1582 return p1copies, p2copies
1578
1583
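Example of the shape of the (p1copies, p2copies) result computed above, with hypothetical paths:

#   p1copies = {'docs/new-name.txt': 'docs/old-name.txt'}
#   p2copies = {}    # only populated when the copy source lives in p2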
1579 @propertycache
1584 @propertycache
1580 def _manifest(self):
1585 def _manifest(self):
1581 """generate a manifest corresponding to the values in self._status
1586 """generate a manifest corresponding to the values in self._status
1582
1587
1583 This reuses the file nodeids from the parent, but we use special node
1588 This reuses the file nodeids from the parent, but we use special node
1584 identifiers for added and modified files. This is used by manifest
1589 identifiers for added and modified files. This is used by manifest
1585 merge to see that files are different and by the update logic to avoid
1590 merge to see that files are different and by the update logic to avoid
1586 deleting newly added files.
1591 deleting newly added files.
1587 """
1592 """
1588 return self._buildstatusmanifest(self._status)
1593 return self._buildstatusmanifest(self._status)
1589
1594
1590 def _buildstatusmanifest(self, status):
1595 def _buildstatusmanifest(self, status):
1591 """Builds a manifest that includes the given status results."""
1596 """Builds a manifest that includes the given status results."""
1592 parents = self.parents()
1597 parents = self.parents()
1593
1598
1594 man = parents[0].manifest().copy()
1599 man = parents[0].manifest().copy()
1595
1600
1596 ff = self._flagfunc
1601 ff = self._flagfunc
1597 for i, l in ((addednodeid, status.added),
1602 for i, l in ((addednodeid, status.added),
1598 (modifiednodeid, status.modified)):
1603 (modifiednodeid, status.modified)):
1599 for f in l:
1604 for f in l:
1600 man[f] = i
1605 man[f] = i
1601 try:
1606 try:
1602 man.setflag(f, ff(f))
1607 man.setflag(f, ff(f))
1603 except OSError:
1608 except OSError:
1604 pass
1609 pass
1605
1610
1606 for f in status.deleted + status.removed:
1611 for f in status.deleted + status.removed:
1607 if f in man:
1612 if f in man:
1608 del man[f]
1613 del man[f]
1609
1614
1610 return man
1615 return man
1611
1616
1612 def _buildstatus(self, other, s, match, listignored, listclean,
1617 def _buildstatus(self, other, s, match, listignored, listclean,
1613 listunknown):
1618 listunknown):
1614 """build a status with respect to another context
1619 """build a status with respect to another context
1615
1620
1616 This includes logic for maintaining the fast path of status when
1621 This includes logic for maintaining the fast path of status when
1617 comparing the working directory against its parent, which is to skip
1622 comparing the working directory against its parent, which is to skip
1618 building a new manifest if self (working directory) is not comparing
1623 building a new manifest if self (working directory) is not comparing
1619 against its parent (repo['.']).
1624 against its parent (repo['.']).
1620 """
1625 """
1621 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1626 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1622 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1627 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1623 # might have accidentally ended up with the entire contents of the file
1628 # might have accidentally ended up with the entire contents of the file
1624 # they are supposed to be linking to.
1629 # they are supposed to be linking to.
1625 s.modified[:] = self._filtersuspectsymlink(s.modified)
1630 s.modified[:] = self._filtersuspectsymlink(s.modified)
1626 if other != self._repo['.']:
1631 if other != self._repo['.']:
1627 s = super(workingctx, self)._buildstatus(other, s, match,
1632 s = super(workingctx, self)._buildstatus(other, s, match,
1628 listignored, listclean,
1633 listignored, listclean,
1629 listunknown)
1634 listunknown)
1630 return s
1635 return s
1631
1636
1632 def _matchstatus(self, other, match):
1637 def _matchstatus(self, other, match):
1633 """override the match method with a filter for directory patterns
1638 """override the match method with a filter for directory patterns
1634
1639
1635 We use inheritance to customize the match.bad method only in the case of
1640 We use inheritance to customize the match.bad method only in the case of
1636 workingctx, since it applies only to the working directory when
1641 workingctx, since it applies only to the working directory when
1637 comparing against the parent changeset.
1642 comparing against the parent changeset.
1638
1643
1639 If we aren't comparing against the working directory's parent, then we
1644 If we aren't comparing against the working directory's parent, then we
1640 just use the default match object sent to us.
1645 just use the default match object sent to us.
1641 """
1646 """
1642 if other != self._repo['.']:
1647 if other != self._repo['.']:
1643 def bad(f, msg):
1648 def bad(f, msg):
1644 # 'f' may be a directory pattern from 'match.files()',
1649 # 'f' may be a directory pattern from 'match.files()',
1645 # so 'f not in ctx1' is not enough
1650 # so 'f not in ctx1' is not enough
1646 if f not in other and not other.hasdir(f):
1651 if f not in other and not other.hasdir(f):
1647 self._repo.ui.warn('%s: %s\n' %
1652 self._repo.ui.warn('%s: %s\n' %
1648 (self._repo.dirstate.pathto(f), msg))
1653 (self._repo.dirstate.pathto(f), msg))
1649 match.bad = bad
1654 match.bad = bad
1650 return match
1655 return match
1651
1656
1652 def walk(self, match):
1657 def walk(self, match):
1653 '''Generates matching file names.'''
1658 '''Generates matching file names.'''
1654 return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
1659 return sorted(self._repo.dirstate.walk(self._repo.narrowmatch(match),
1655 subrepos=sorted(self.substate),
1660 subrepos=sorted(self.substate),
1656 unknown=True, ignored=False))
1661 unknown=True, ignored=False))
1657
1662
1658 def matches(self, match):
1663 def matches(self, match):
1659 match = self._repo.narrowmatch(match)
1664 match = self._repo.narrowmatch(match)
1660 ds = self._repo.dirstate
1665 ds = self._repo.dirstate
1661 return sorted(f for f in ds.matches(match) if ds[f] != 'r')
1666 return sorted(f for f in ds.matches(match) if ds[f] != 'r')
1662
1667
1663 def markcommitted(self, node):
1668 def markcommitted(self, node):
1664 with self._repo.dirstate.parentchange():
1669 with self._repo.dirstate.parentchange():
1665 for f in self.modified() + self.added():
1670 for f in self.modified() + self.added():
1666 self._repo.dirstate.normal(f)
1671 self._repo.dirstate.normal(f)
1667 for f in self.removed():
1672 for f in self.removed():
1668 self._repo.dirstate.drop(f)
1673 self._repo.dirstate.drop(f)
1669 self._repo.dirstate.setparents(node)
1674 self._repo.dirstate.setparents(node)
1670
1675
1671 # write changes out explicitly, because nesting wlock at
1676 # write changes out explicitly, because nesting wlock at
1672 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1677 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1673 # from immediately doing so for subsequent changing files
1678 # from immediately doing so for subsequent changing files
1674 self._repo.dirstate.write(self._repo.currenttransaction())
1679 self._repo.dirstate.write(self._repo.currenttransaction())
1675
1680
1676 sparse.aftercommit(self._repo, node)
1681 sparse.aftercommit(self._repo, node)
1677
1682
1678 class committablefilectx(basefilectx):
1683 class committablefilectx(basefilectx):
1679 """A committablefilectx provides common functionality for a file context
1684 """A committablefilectx provides common functionality for a file context
1680 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1685 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1681 def __init__(self, repo, path, filelog=None, ctx=None):
1686 def __init__(self, repo, path, filelog=None, ctx=None):
1682 self._repo = repo
1687 self._repo = repo
1683 self._path = path
1688 self._path = path
1684 self._changeid = None
1689 self._changeid = None
1685 self._filerev = self._filenode = None
1690 self._filerev = self._filenode = None
1686
1691
1687 if filelog is not None:
1692 if filelog is not None:
1688 self._filelog = filelog
1693 self._filelog = filelog
1689 if ctx:
1694 if ctx:
1690 self._changectx = ctx
1695 self._changectx = ctx
1691
1696
1692 def __nonzero__(self):
1697 def __nonzero__(self):
1693 return True
1698 return True
1694
1699
1695 __bool__ = __nonzero__
1700 __bool__ = __nonzero__
1696
1701
1697 def linkrev(self):
1702 def linkrev(self):
1698 # linked to self._changectx no matter if file is modified or not
1703 # linked to self._changectx no matter if file is modified or not
1699 return self.rev()
1704 return self.rev()
1700
1705
1701 def renamed(self):
1706 def renamed(self):
1702 path = self.copysource()
1707 path = self.copysource()
1703 if not path:
1708 if not path:
1704 return None
1709 return None
1705 return path, self._changectx._parents[0]._manifest.get(path, nullid)
1710 return path, self._changectx._parents[0]._manifest.get(path, nullid)
1706
1711
1707 def parents(self):
1712 def parents(self):
1708 '''return parent filectxs, following copies if necessary'''
1713 '''return parent filectxs, following copies if necessary'''
1709 def filenode(ctx, path):
1714 def filenode(ctx, path):
1710 return ctx._manifest.get(path, nullid)
1715 return ctx._manifest.get(path, nullid)
1711
1716
1712 path = self._path
1717 path = self._path
1713 fl = self._filelog
1718 fl = self._filelog
1714 pcl = self._changectx._parents
1719 pcl = self._changectx._parents
1715 renamed = self.renamed()
1720 renamed = self.renamed()
1716
1721
1717 if renamed:
1722 if renamed:
1718 pl = [renamed + (None,)]
1723 pl = [renamed + (None,)]
1719 else:
1724 else:
1720 pl = [(path, filenode(pcl[0], path), fl)]
1725 pl = [(path, filenode(pcl[0], path), fl)]
1721
1726
1722 for pc in pcl[1:]:
1727 for pc in pcl[1:]:
1723 pl.append((path, filenode(pc, path), fl))
1728 pl.append((path, filenode(pc, path), fl))
1724
1729
1725 return [self._parentfilectx(p, fileid=n, filelog=l)
1730 return [self._parentfilectx(p, fileid=n, filelog=l)
1726 for p, n, l in pl if n != nullid]
1731 for p, n, l in pl if n != nullid]
1727
1732
1728 def children(self):
1733 def children(self):
1729 return []
1734 return []
1730
1735
1731 class workingfilectx(committablefilectx):
1736 class workingfilectx(committablefilectx):
1732 """A workingfilectx object makes access to data related to a particular
1737 """A workingfilectx object makes access to data related to a particular
1733 file in the working directory convenient."""
1738 file in the working directory convenient."""
1734 def __init__(self, repo, path, filelog=None, workingctx=None):
1739 def __init__(self, repo, path, filelog=None, workingctx=None):
1735 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1740 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1736
1741
1737 @propertycache
1742 @propertycache
1738 def _changectx(self):
1743 def _changectx(self):
1739 return workingctx(self._repo)
1744 return workingctx(self._repo)
1740
1745
1741 def data(self):
1746 def data(self):
1742 return self._repo.wread(self._path)
1747 return self._repo.wread(self._path)
1743 def copysource(self):
1748 def copysource(self):
1744 return self._repo.dirstate.copied(self._path)
1749 return self._repo.dirstate.copied(self._path)
1745
1750
1746 def size(self):
1751 def size(self):
1747 return self._repo.wvfs.lstat(self._path).st_size
1752 return self._repo.wvfs.lstat(self._path).st_size
1748 def lstat(self):
1753 def lstat(self):
1749 return self._repo.wvfs.lstat(self._path)
1754 return self._repo.wvfs.lstat(self._path)
1750 def date(self):
1755 def date(self):
1751 t, tz = self._changectx.date()
1756 t, tz = self._changectx.date()
1752 try:
1757 try:
1753 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1758 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1754 except OSError as err:
1759 except OSError as err:
1755 if err.errno != errno.ENOENT:
1760 if err.errno != errno.ENOENT:
1756 raise
1761 raise
1757 return (t, tz)
1762 return (t, tz)
1758
1763
1759 def exists(self):
1764 def exists(self):
1760 return self._repo.wvfs.exists(self._path)
1765 return self._repo.wvfs.exists(self._path)
1761
1766
1762 def lexists(self):
1767 def lexists(self):
1763 return self._repo.wvfs.lexists(self._path)
1768 return self._repo.wvfs.lexists(self._path)
1764
1769
1765 def audit(self):
1770 def audit(self):
1766 return self._repo.wvfs.audit(self._path)
1771 return self._repo.wvfs.audit(self._path)
1767
1772
1768 def cmp(self, fctx):
1773 def cmp(self, fctx):
1769 """compare with other file context
1774 """compare with other file context
1770
1775
1771 returns True if different from fctx.
1776 returns True if different from fctx.
1772 """
1777 """
1773 # fctx should be a filectx (not a workingfilectx)
1778 # fctx should be a filectx (not a workingfilectx)
1774 # invert comparison to reuse the same code path
1779 # invert comparison to reuse the same code path
1775 return fctx.cmp(self)
1780 return fctx.cmp(self)
1776
1781
1777 def remove(self, ignoremissing=False):
1782 def remove(self, ignoremissing=False):
1778 """wraps unlink for a repo's working directory"""
1783 """wraps unlink for a repo's working directory"""
1779 rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
1784 rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
1780 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
1785 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
1781 rmdir=rmdir)
1786 rmdir=rmdir)
1782
1787
1783 def write(self, data, flags, backgroundclose=False, **kwargs):
1788 def write(self, data, flags, backgroundclose=False, **kwargs):
1784 """wraps repo.wwrite"""
1789 """wraps repo.wwrite"""
1785 return self._repo.wwrite(self._path, data, flags,
1790 return self._repo.wwrite(self._path, data, flags,
1786 backgroundclose=backgroundclose,
1791 backgroundclose=backgroundclose,
1787 **kwargs)
1792 **kwargs)
1788
1793
1789 def markcopied(self, src):
1794 def markcopied(self, src):
1790 """marks this file a copy of `src`"""
1795 """marks this file a copy of `src`"""
1791 self._repo.dirstate.copy(src, self._path)
1796 self._repo.dirstate.copy(src, self._path)
1792
1797
1793 def clearunknown(self):
1798 def clearunknown(self):
1794 """Removes conflicting items in the working directory so that
1799 """Removes conflicting items in the working directory so that
1795 ``write()`` can be called successfully.
1800 ``write()`` can be called successfully.
1796 """
1801 """
1797 wvfs = self._repo.wvfs
1802 wvfs = self._repo.wvfs
1798 f = self._path
1803 f = self._path
1799 wvfs.audit(f)
1804 wvfs.audit(f)
1800 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1805 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1801 # remove files under the directory as they should already be
1806 # remove files under the directory as they should already be
1802 # warned and backed up
1807 # warned and backed up
1803 if wvfs.isdir(f) and not wvfs.islink(f):
1808 if wvfs.isdir(f) and not wvfs.islink(f):
1804 wvfs.rmtree(f, forcibly=True)
1809 wvfs.rmtree(f, forcibly=True)
1805 for p in reversed(list(util.finddirs(f))):
1810 for p in reversed(list(util.finddirs(f))):
1806 if wvfs.isfileorlink(p):
1811 if wvfs.isfileorlink(p):
1807 wvfs.unlink(p)
1812 wvfs.unlink(p)
1808 break
1813 break
1809 else:
1814 else:
1810 # don't remove files if path conflicts are not processed
1815 # don't remove files if path conflicts are not processed
1811 if wvfs.isdir(f) and not wvfs.islink(f):
1816 if wvfs.isdir(f) and not wvfs.islink(f):
1812 wvfs.removedirs(f)
1817 wvfs.removedirs(f)
1813
1818
1814 def setflags(self, l, x):
1819 def setflags(self, l, x):
1815 self._repo.wvfs.setflags(self._path, l, x)
1820 self._repo.wvfs.setflags(self._path, l, x)
1816
1821
1817 class overlayworkingctx(committablectx):
1822 class overlayworkingctx(committablectx):
1818 """Wraps another mutable context with a write-back cache that can be
1823 """Wraps another mutable context with a write-back cache that can be
1819 converted into a commit context.
1824 converted into a commit context.
1820
1825
1821 self._cache[path] maps to a dict with keys: {
1826 self._cache[path] maps to a dict with keys: {
1822 'exists': bool?
1827 'exists': bool?
1823 'date': date?
1828 'date': date?
1824 'data': str?
1829 'data': str?
1825 'flags': str?
1830 'flags': str?
1826 'copied': str? (path or None)
1831 'copied': str? (path or None)
1827 }
1832 }
1828 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1833 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1829 is `False`, the file was deleted.
1834 is `False`, the file was deleted.
1830 """
1835 """
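    # Illustrative sketch (not part of the upstream module): how the cache is
    # typically populated during an in-memory merge or rebase; `repo` is a
    # placeholder::
    #
    #     wctx = overlayworkingctx(repo)
    #     wctx.setbase(repo['.'])
    #     wctx.write('a.txt', b'new content')
    #     # wctx._cache['a.txt'] is now roughly:
    #     # {'exists': True, 'data': b'new content', 'date': (<now>, <tz>),
    #     #  'flags': '', 'copied': None}
    #     wctx.remove('b.txt')
    #     # wctx._cache['b.txt']['exists'] is False, i.e. the file is deleted.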

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, '_manifest')

    def data(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                if self._cache[path]['data'] is not None:
                    return self._cache[path]['data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        def f(path):
            return self._cache[path]['flags']
        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                self._existsinparent(f)]

    def added(self):
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                not self._existsinparent(f)]

    def removed(self):
        return [f for f in self._cache.keys() if
                not self._cache[f]['exists'] and self._existsinparent(f)]

    def p1copies(self):
        copies = self._repo._wrappedctx.p1copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None) # delete if it exists
            source = self._cache[f]['copied']
            if source:
                copies[f] = source
        return copies

    def p2copies(self):
        copies = self._repo._wrappedctx.p2copies().copy()
        narrowmatch = self._repo.narrowmatch()
        for f in self._cache.keys():
            if not narrowmatch(f):
                continue
            copies.pop(f, None) # delete if it exists
            source = self._cache[f]['copied']
            if source:
                copies[f] = source
        return copies

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path]['date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        self._markdirty(path, exists=True, date=self.filedate(path),
                        flags=self.flags(path), copied=origin)

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path]['copied']
        else:
            return None

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return self._cache[path]['flags']
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].flags()

    def __contains__(self, key):
        if key in self._cache:
            return self._cache[key]['exists']
        return key in self.p1()

    def _existsinparent(self, path):
        try:
            # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """
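        # Illustrative walk-through (not upstream text): for path 'a/b/c' the
        # loop below checks the prefixes '', 'a' and 'a/b'; if any of them is
        # already a file tracked in this context, the write aborts (fail()
        # reports whether the conflicting entry is a symlink or a plain file).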
        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if 'l' in self.p1()[component].flags():
                raise error.Abort("error: %s conflicts with symlink %s "
                                  "in %d." % (path, component,
                                              self.p1().rev()))
            else:
                raise error.Abort("error: '%s' conflicts with file '%s' in "
                                  "%d." % (path, component,
                                           self.p1().rev()))

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split('/')
        for i in pycompat.xrange(len(components)):
            component = "/".join(components[0:i])
            if component in self:
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't have any paths matching `path/*`).
        match = self.match([path], default=b'path')
        matches = self.p1().manifest().matches(match)
        mfiles = matches.keys()
        if len(mfiles) > 0:
            if len(mfiles) == 1 and mfiles[0] == path:
                return
            # omit the files which are deleted in current IMM wctx
            mfiles = [m for m in mfiles if m in self]
            if not mfiles:
                return
            raise error.Abort("error: file '%s' cannot be written because "
                              " '%s/' is a directory in %s (containing %d "
                              "entries: %s)"
                              % (path, path, self.p1(), len(mfiles),
                                 ', '.join(mfiles)))

    def write(self, path, data, flags='', **kwargs):
        if data is None:
            raise error.ProgrammingError("data must be non-None")
        self._auditconflicts(path)
        self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
                        flags=flags)

    def setflags(self, path, l, x):
        flag = ''
        if l:
            flag = 'l'
        elif x:
            flag = 'x'
        self._markdirty(path, exists=True, date=dateutil.makedate(),
                        flags=flag)

    def remove(self, path):
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
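        # Illustrative note (not upstream text): if 'link' was written with
        # the 'l' flag and its cached data is 'target', exists('link')
        # recurses into exists('target'); a dangling target therefore yields
        # False, while lexists('link') would still be True.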
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                    'l' in self._cache[path]['flags']):
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']

        return self._existsinparent(path)

    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path]['exists']

        return self._existsinparent(path)

    def size(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return len(self._cache[path]['data'])
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        return self._wrappedctx[path].size()

    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
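        # Illustrative sketch (not upstream code): in-memory rebase does
        # roughly the following, with placeholder metadata::
        #
        #     mctx = wctx.tomemctx('commit message', parents=(p1rev, p2rev),
        #                          user='someone')
        #     newnode = repo.commitctx(mctx)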
        # Default parents to the wrapped context's if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        files = self.files()
        def getfile(repo, memctx, path):
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)

    def isdirty(self, path):
        return path in self._cache

    def isempty(self):
        # We need to discard any keys that are actually clean before the empty
        # commit check.
        self._compact()
        return len(self._cache) == 0

    def clean(self):
        self._cache = {}

    def _compact(self):
        """Removes keys from the cache that are actually clean, by comparing
        them with the underlying context.

        This can occur during the merge process, e.g. by passing --tool :local
        to resolve a conflict.
        """
        keys = []
        # This won't be perfect, but can help performance significantly when
        # using things like remotefilelog.
        scmutil.prefetchfiles(
            self.repo(), [self.p1().rev()],
            scmutil.matchfiles(self.repo(), self._cache.keys()))

        for path in self._cache.keys():
            cache = self._cache[path]
            try:
                underlying = self._wrappedctx[path]
                if (underlying.data() == cache['data'] and
                        underlying.flags() == cache['flags']):
                    keys.append(path)
            except error.ManifestLookupError:
                # Path not in the underlying manifest (created).
                continue

        for path in keys:
            del self._cache[path]
        return keys

    def _markdirty(self, path, exists, data=None, date=None, flags='',
                   copied=None):
        # data not provided, let's see if we already have some; if not, let's
        # grab it from our underlying context, so that we always have data if
        # the file is marked as existing.
        if exists and data is None:
            oldentry = self._cache.get(path) or {}
            data = oldentry.get('data')
            if data is None:
                data = self._wrappedctx[path].data()

        self._cache[path] = {
            'exists': exists,
            'data': data,
            'date': date,
            'flags': flags,
            'copied': copied,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(self._repo, path, parent=self,
                                     filelog=filelog)

class overlayworkingfilectx(committablefilectx):
    """Wraps a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``."""
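    # Illustrative note (not upstream text): instances are normally obtained
    # via overlayworkingctx.filectx(path); every read and write below simply
    # delegates to the parent overlayworkingctx cache, so nothing touches the
    # filesystem.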

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        return self.data() != fctx.data()

    def changectx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def copysource(self):
        return self._parent.copydata(self._path)

    def size(self):
        return self._parent.size(self._path)

    def markcopied(self, origin):
        self._parent.markcopied(self._path, origin)

    def audit(self):
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        return self._parent.write(self._path, data, flags, **kwargs)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)

    def clearunknown(self):
        pass

class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        super(workingcommitctx, self).__init__(repo, text, user, date, extra,
                                               changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed

def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        if path not in cache:
            cache[path] = func(repo, memctx, path)
        return cache[path]

    return getfilectx

def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
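    # Illustrative sketch (not upstream code): carrying files from an existing
    # context into a new in-memory commit, with placeholder names::
    #
    #     filectxfn = memfilefromctx(repo['tip'])
    #     mctx = memctx(repo, (repo['tip'].node(), None), 'copy files',
    #                   ['foo.txt'], filectxfn)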
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        copysource = fctx.copysource()
        return memfilectx(repo, memctx, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copysource=copysource)

    return getfilectx

def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copysource = patchstore.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, memctx, path, data, islink=islink,
                          isexec=isexec, copysource=copysource)

    return getfilectx

class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time, while the
    related file data is made available through a callback mechanism.
    'repo' is the current localrepo, 'parents' is a sequence of two parent
    revision identifiers (pass None for every missing parent), 'text' is
    the commit message and 'files' lists names of files touched by the
    revision (normalized and relative to repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    the requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but call order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn returns None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to the current
    repository username, date is the commit date in any format
    supported by dateutil.parsedate() and defaults to the current date,
    extra is a dictionary of metadata or is left empty.
    """
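    # Illustrative sketch (not part of the upstream docstring): creating a
    # one-file commit entirely in memory, with placeholder content::
    #
    #     def filectxfn(repo, mctx, path):
    #         return memfilectx(repo, mctx, path, b'hello\n')
    #
    #     mctx = memctx(repo, (repo['tip'].node(), None), 'add greeting',
    #                   ['greeting.txt'], filectxfn, user='someone')
    #     newnode = repo.commitctx(mctx)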

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra,
                                     branch=branch)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [self._repo[p] for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            man[f] = modifiednodeid

        for f in self._status.added:
            man[f] = addednodeid

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized as a list whose length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, changectx, path, data, islink=False,
                 isexec=False, copysource=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copysource is the source file path if the current file was copied in
        the revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, changectx)
        self._data = data
        if islink:
            self._flags = 'l'
        elif isexec:
            self._flags = 'x'
        else:
            self._flags = ''
        self._copysource = copysource

    def copysource(self):
        return self._copysource

    def cmp(self, fctx):
        return self.data() != fctx.data()

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags, **kwargs):
        """wraps repo.wwrite"""
        self._data = data


class metadataonlyctx(committablectx):
    """Like memctx, but it reuses the manifest of a different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'ctx' is the original revision whose manifest we're
    reusing, 'parents' is a sequence of two parent revision identifiers
    (pass None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    dateutil.parsedate() and defaults to the current date, extra is a
    dictionary of metadata or is left empty.
    """
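    # Illustrative sketch (not upstream code): rewriting only the user of an
    # existing changeset while reusing its manifest, with placeholder values::
    #
    #     ctx = repo['tip']
    #     newctx = metadataonlyctx(repo, ctx,
    #                              parents=(ctx.p1().node(), None),
    #                              text=ctx.description(), user='new user')
    #     newnode = repo.commitctx(newctx)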
    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError(r"can't reuse the manifest: its p1 "
                               r"doesn't match the new ctx p1")
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError(r"can't reuse the manifest: "
                               r"its p2 doesn't match the new ctx p2")

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized as a list whose length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
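    # Illustrative sketch (not upstream code): used e.g. by simplemerge-style
    # tools to compare an on-disk file against a tracked one, with placeholder
    # paths::
    #
    #     fctx = arbitraryfilectx('/tmp/other.txt', repo=repo)
    #     if fctx.cmp(repo[None]['other.txt']):
    #         pass  # contents differ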
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        assert not flags
        with open(self._path, "wb") as f:
            f.write(data)