##// END OF EJS Templates
context: delete an obsolete comment...
Martin von Zweigbergk -
r38798:e971d6eb default
parent child Browse files
Show More
@@ -1,2557 +1,2556 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirfilenodeids,
24 wdirfilenodeids,
25 wdirid,
25 wdirid,
26 )
26 )
27 from . import (
27 from . import (
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 obsolete as obsmod,
33 obsolete as obsmod,
34 patch,
34 patch,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 pycompat,
37 pycompat,
38 repoview,
38 repoview,
39 revlog,
39 revlog,
40 scmutil,
40 scmutil,
41 sparse,
41 sparse,
42 subrepo,
42 subrepo,
43 subrepoutil,
43 subrepoutil,
44 util,
44 util,
45 )
45 )
46 from .utils import (
46 from .utils import (
47 dateutil,
47 dateutil,
48 stringutil,
48 stringutil,
49 )
49 )
50
50
# Short module-level alias so decorated cached attributes read cleanly.
propertycache = util.propertycache
52
52
class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
    be committed,
    memctx: a context that represents changes in-memory and can also
    be committed."""

    def __init__(self, repo):
        self._repo = repo

    def __bytes__(self):
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # Contexts compare equal when they are of the same concrete type
        # and refer to the same revision; anything lacking _rev is unequal.
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirfilenodeids:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        return subrepoutil.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public

    def matchfileset(self, expr, badfn=None):
        return fileset.match(self, expr, badfn=badfn)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def orphan(self):
        """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def phasedivergent(self):
        """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def contentdivergent(self):
        """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def isunstable(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append('orphan')
        if self.phasedivergent():
            instabilities.append('phase-divergent')
        if self.contentdivergent():
            instabilities.append('content-divergent')
        return instabilities

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        # A single-parent changeset gets the null revision as second parent.
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        # Prefer an already-materialized manifest if one is cached on self.
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, changes=None, opts=None,
             losedatafn=None, prefix='', relroot='', copy=None,
             hunksfilterfn=None):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
                          opts=opts, losedatafn=losedatafn, prefix=prefix,
                          relroot=relroot, copy=copy,
                          hunksfilterfn=hunksfilterfn)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        for l in r:
            l.sort()

        return r
380 class changectx(basectx):
380 class changectx(basectx):
381 """A changecontext object makes access to data related to a particular
381 """A changecontext object makes access to data related to a particular
382 changeset convenient. It represents a read-only context already present in
382 changeset convenient. It represents a read-only context already present in
383 the repo."""
383 the repo."""
384 def __init__(self, repo, changeid='.'):
384 def __init__(self, repo, changeid='.'):
385 """changeid is a revision number, node, or tag"""
385 """changeid is a revision number, node, or tag"""
386 super(changectx, self).__init__(repo)
386 super(changectx, self).__init__(repo)
387
387
388 try:
388 try:
389 if isinstance(changeid, int):
389 if isinstance(changeid, int):
390 self._node = repo.changelog.node(changeid)
390 self._node = repo.changelog.node(changeid)
391 self._rev = changeid
391 self._rev = changeid
392 return
392 return
393 elif changeid == 'null':
393 elif changeid == 'null':
394 self._node = nullid
394 self._node = nullid
395 self._rev = nullrev
395 self._rev = nullrev
396 return
396 return
397 elif changeid == 'tip':
397 elif changeid == 'tip':
398 self._node = repo.changelog.tip()
398 self._node = repo.changelog.tip()
399 self._rev = repo.changelog.rev(self._node)
399 self._rev = repo.changelog.rev(self._node)
400 return
400 return
401 elif (changeid == '.'
401 elif (changeid == '.'
402 or repo.local() and changeid == repo.dirstate.p1()):
402 or repo.local() and changeid == repo.dirstate.p1()):
403 # this is a hack to delay/avoid loading obsmarkers
403 # this is a hack to delay/avoid loading obsmarkers
404 # when we know that '.' won't be hidden
404 # when we know that '.' won't be hidden
405 self._node = repo.dirstate.p1()
405 self._node = repo.dirstate.p1()
406 self._rev = repo.unfiltered().changelog.rev(self._node)
406 self._rev = repo.unfiltered().changelog.rev(self._node)
407 return
407 return
408 elif len(changeid) == 20:
408 elif len(changeid) == 20:
409 try:
409 try:
410 self._node = changeid
410 self._node = changeid
411 self._rev = repo.changelog.rev(changeid)
411 self._rev = repo.changelog.rev(changeid)
412 return
412 return
413 except error.FilteredLookupError:
413 except error.FilteredLookupError:
414 changeid = hex(changeid) # for the error message
414 changeid = hex(changeid) # for the error message
415 raise
415 raise
416 except LookupError:
416 except LookupError:
417 # check if it might have come from damaged dirstate
417 # check if it might have come from damaged dirstate
418 #
418 #
419 # XXX we could avoid the unfiltered if we had a recognizable
419 # XXX we could avoid the unfiltered if we had a recognizable
420 # exception for filtered changeset access
420 # exception for filtered changeset access
421 if (repo.local()
421 if (repo.local()
422 and changeid in repo.unfiltered().dirstate.parents()):
422 and changeid in repo.unfiltered().dirstate.parents()):
423 msg = _("working directory has unknown parent '%s'!")
423 msg = _("working directory has unknown parent '%s'!")
424 raise error.Abort(msg % short(changeid))
424 raise error.Abort(msg % short(changeid))
425 changeid = hex(changeid) # for the error message
425 changeid = hex(changeid) # for the error message
426
426
427 elif len(changeid) == 40:
427 elif len(changeid) == 40:
428 try:
428 try:
429 self._node = bin(changeid)
429 self._node = bin(changeid)
430 self._rev = repo.changelog.rev(self._node)
430 self._rev = repo.changelog.rev(self._node)
431 return
431 return
432 except error.FilteredLookupError:
432 except error.FilteredLookupError:
433 raise
433 raise
434 except (TypeError, LookupError):
434 except (TypeError, LookupError):
435 pass
435 pass
436 else:
436 else:
437 raise error.ProgrammingError(
437 raise error.ProgrammingError(
438 "unsupported changeid '%s' of type %s" %
438 "unsupported changeid '%s' of type %s" %
439 (changeid, type(changeid)))
439 (changeid, type(changeid)))
440
440
441 # lookup failed
442 except (error.FilteredIndexError, error.FilteredLookupError):
441 except (error.FilteredIndexError, error.FilteredLookupError):
443 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
442 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
444 % pycompat.bytestr(changeid))
443 % pycompat.bytestr(changeid))
445 except error.FilteredRepoLookupError:
444 except error.FilteredRepoLookupError:
446 raise
445 raise
447 except IndexError:
446 except IndexError:
448 pass
447 pass
449 raise error.RepoLookupError(
448 raise error.RepoLookupError(
450 _("unknown revision '%s'") % changeid)
449 _("unknown revision '%s'") % changeid)
451
450
452 def __hash__(self):
451 def __hash__(self):
453 try:
452 try:
454 return hash(self._rev)
453 return hash(self._rev)
455 except AttributeError:
454 except AttributeError:
456 return id(self)
455 return id(self)
457
456
458 def __nonzero__(self):
457 def __nonzero__(self):
459 return self._rev != nullrev
458 return self._rev != nullrev
460
459
461 __bool__ = __nonzero__
460 __bool__ = __nonzero__
462
461
463 @propertycache
462 @propertycache
464 def _changeset(self):
463 def _changeset(self):
465 return self._repo.changelog.changelogrevision(self.rev())
464 return self._repo.changelog.changelogrevision(self.rev())
466
465
467 @propertycache
466 @propertycache
468 def _manifest(self):
467 def _manifest(self):
469 return self._manifestctx.read()
468 return self._manifestctx.read()
470
469
471 @property
470 @property
472 def _manifestctx(self):
471 def _manifestctx(self):
473 return self._repo.manifestlog[self._changeset.manifest]
472 return self._repo.manifestlog[self._changeset.manifest]
474
473
475 @propertycache
474 @propertycache
476 def _manifestdelta(self):
475 def _manifestdelta(self):
477 return self._manifestctx.readdelta()
476 return self._manifestctx.readdelta()
478
477
479 @propertycache
478 @propertycache
480 def _parents(self):
479 def _parents(self):
481 repo = self._repo
480 repo = self._repo
482 p1, p2 = repo.changelog.parentrevs(self._rev)
481 p1, p2 = repo.changelog.parentrevs(self._rev)
483 if p2 == nullrev:
482 if p2 == nullrev:
484 return [changectx(repo, p1)]
483 return [changectx(repo, p1)]
485 return [changectx(repo, p1), changectx(repo, p2)]
484 return [changectx(repo, p1), changectx(repo, p2)]
486
485
487 def changeset(self):
486 def changeset(self):
488 c = self._changeset
487 c = self._changeset
489 return (
488 return (
490 c.manifest,
489 c.manifest,
491 c.user,
490 c.user,
492 c.date,
491 c.date,
493 c.files,
492 c.files,
494 c.description,
493 c.description,
495 c.extra,
494 c.extra,
496 )
495 )
497 def manifestnode(self):
496 def manifestnode(self):
498 return self._changeset.manifest
497 return self._changeset.manifest
499
498
500 def user(self):
499 def user(self):
501 return self._changeset.user
500 return self._changeset.user
502 def date(self):
501 def date(self):
503 return self._changeset.date
502 return self._changeset.date
504 def files(self):
503 def files(self):
505 return self._changeset.files
504 return self._changeset.files
506 def description(self):
505 def description(self):
507 return self._changeset.description
506 return self._changeset.description
508 def branch(self):
507 def branch(self):
509 return encoding.tolocal(self._changeset.extra.get("branch"))
508 return encoding.tolocal(self._changeset.extra.get("branch"))
510 def closesbranch(self):
509 def closesbranch(self):
511 return 'close' in self._changeset.extra
510 return 'close' in self._changeset.extra
512 def extra(self):
511 def extra(self):
513 """Return a dict of extra information."""
512 """Return a dict of extra information."""
514 return self._changeset.extra
513 return self._changeset.extra
515 def tags(self):
514 def tags(self):
516 """Return a list of byte tag names"""
515 """Return a list of byte tag names"""
517 return self._repo.nodetags(self._node)
516 return self._repo.nodetags(self._node)
518 def bookmarks(self):
517 def bookmarks(self):
519 """Return a list of byte bookmark names."""
518 """Return a list of byte bookmark names."""
520 return self._repo.nodebookmarks(self._node)
519 return self._repo.nodebookmarks(self._node)
521 def phase(self):
520 def phase(self):
522 return self._repo._phasecache.phase(self._repo, self._rev)
521 return self._repo._phasecache.phase(self._repo, self._rev)
523 def hidden(self):
522 def hidden(self):
524 return self._rev in repoview.filterrevs(self._repo, 'visible')
523 return self._rev in repoview.filterrevs(self._repo, 'visible')
525
524
526 def isinmemory(self):
525 def isinmemory(self):
527 return False
526 return False
528
527
529 def children(self):
528 def children(self):
530 """return list of changectx contexts for each child changeset.
529 """return list of changectx contexts for each child changeset.
531
530
532 This returns only the immediate child changesets. Use descendants() to
531 This returns only the immediate child changesets. Use descendants() to
533 recursively walk children.
532 recursively walk children.
534 """
533 """
535 c = self._repo.changelog.children(self._node)
534 c = self._repo.changelog.children(self._node)
536 return [changectx(self._repo, x) for x in c]
535 return [changectx(self._repo, x) for x in c]
537
536
538 def ancestors(self):
537 def ancestors(self):
539 for a in self._repo.changelog.ancestors([self._rev]):
538 for a in self._repo.changelog.ancestors([self._rev]):
540 yield changectx(self._repo, a)
539 yield changectx(self._repo, a)
541
540
542 def descendants(self):
541 def descendants(self):
543 """Recursively yield all children of the changeset.
542 """Recursively yield all children of the changeset.
544
543
545 For just the immediate children, use children()
544 For just the immediate children, use children()
546 """
545 """
547 for d in self._repo.changelog.descendants([self._rev]):
546 for d in self._repo.changelog.descendants([self._rev]):
548 yield changectx(self._repo, d)
547 yield changectx(self._repo, d)
549
548
550 def filectx(self, path, fileid=None, filelog=None):
549 def filectx(self, path, fileid=None, filelog=None):
551 """get a file context from this changeset"""
550 """get a file context from this changeset"""
552 if fileid is None:
551 if fileid is None:
553 fileid = self.filenode(path)
552 fileid = self.filenode(path)
554 return filectx(self._repo, path, fileid=fileid,
553 return filectx(self._repo, path, fileid=fileid,
555 changectx=self, filelog=filelog)
554 changectx=self, filelog=filelog)
556
555
557 def ancestor(self, c2, warn=False):
556 def ancestor(self, c2, warn=False):
558 """return the "best" ancestor context of self and c2
557 """return the "best" ancestor context of self and c2
559
558
560 If there are multiple candidates, it will show a message and check
559 If there are multiple candidates, it will show a message and check
561 merge.preferancestor configuration before falling back to the
560 merge.preferancestor configuration before falling back to the
562 revlog ancestor."""
561 revlog ancestor."""
563 # deal with workingctxs
562 # deal with workingctxs
564 n2 = c2._node
563 n2 = c2._node
565 if n2 is None:
564 if n2 is None:
566 n2 = c2._parents[0]._node
565 n2 = c2._parents[0]._node
567 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
566 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
568 if not cahs:
567 if not cahs:
569 anc = nullid
568 anc = nullid
570 elif len(cahs) == 1:
569 elif len(cahs) == 1:
571 anc = cahs[0]
570 anc = cahs[0]
572 else:
571 else:
573 # experimental config: merge.preferancestor
572 # experimental config: merge.preferancestor
574 for r in self._repo.ui.configlist('merge', 'preferancestor'):
573 for r in self._repo.ui.configlist('merge', 'preferancestor'):
575 try:
574 try:
576 ctx = scmutil.revsymbol(self._repo, r)
575 ctx = scmutil.revsymbol(self._repo, r)
577 except error.RepoLookupError:
576 except error.RepoLookupError:
578 continue
577 continue
579 anc = ctx.node()
578 anc = ctx.node()
580 if anc in cahs:
579 if anc in cahs:
581 break
580 break
582 else:
581 else:
583 anc = self._repo.changelog.ancestor(self._node, n2)
582 anc = self._repo.changelog.ancestor(self._node, n2)
584 if warn:
583 if warn:
585 self._repo.ui.status(
584 self._repo.ui.status(
586 (_("note: using %s as ancestor of %s and %s\n") %
585 (_("note: using %s as ancestor of %s and %s\n") %
587 (short(anc), short(self._node), short(n2))) +
586 (short(anc), short(self._node), short(n2))) +
588 ''.join(_(" alternatively, use --config "
587 ''.join(_(" alternatively, use --config "
589 "merge.preferancestor=%s\n") %
588 "merge.preferancestor=%s\n") %
590 short(n) for n in sorted(cahs) if n != anc))
589 short(n) for n in sorted(cahs) if n != anc))
591 return changectx(self._repo, anc)
590 return changectx(self._repo, anc)
592
591
593 def descendant(self, other):
592 def descendant(self, other):
594 msg = (b'ctx.descendant(other) is deprecated, '
593 msg = (b'ctx.descendant(other) is deprecated, '
595 b'use ctx.isancestorof(other)')
594 b'use ctx.isancestorof(other)')
596 self._repo.ui.deprecwarn(msg, b'4.7')
595 self._repo.ui.deprecwarn(msg, b'4.7')
597 return self.isancestorof(other)
596 return self.isancestorof(other)
598
597
599 def isancestorof(self, other):
598 def isancestorof(self, other):
600 """True if this changeset is an ancestor of other"""
599 """True if this changeset is an ancestor of other"""
601 return self._repo.changelog.isancestorrev(self._rev, other._rev)
600 return self._repo.changelog.isancestorrev(self._rev, other._rev)
602
601
603 def walk(self, match):
602 def walk(self, match):
604 '''Generates matching file names.'''
603 '''Generates matching file names.'''
605
604
606 # Wrap match.bad method to have message with nodeid
605 # Wrap match.bad method to have message with nodeid
607 def bad(fn, msg):
606 def bad(fn, msg):
608 # The manifest doesn't know about subrepos, so don't complain about
607 # The manifest doesn't know about subrepos, so don't complain about
609 # paths into valid subrepos.
608 # paths into valid subrepos.
610 if any(fn == s or fn.startswith(s + '/')
609 if any(fn == s or fn.startswith(s + '/')
611 for s in self.substate):
610 for s in self.substate):
612 return
611 return
613 match.bad(fn, _('no such file in rev %s') % self)
612 match.bad(fn, _('no such file in rev %s') % self)
614
613
615 m = matchmod.badmatch(match, bad)
614 m = matchmod.badmatch(match, bad)
616 return self._manifest.walk(m)
615 return self._manifest.walk(m)
617
616
618 def matches(self, match):
617 def matches(self, match):
619 return self.walk(match)
618 return self.walk(match)
620
619
621 class basefilectx(object):
620 class basefilectx(object):
622 """A filecontext object represents the common logic for its children:
621 """A filecontext object represents the common logic for its children:
623 filectx: read-only access to a filerevision that is already present
622 filectx: read-only access to a filerevision that is already present
624 in the repo,
623 in the repo,
625 workingfilectx: a filecontext that represents files from the working
624 workingfilectx: a filecontext that represents files from the working
626 directory,
625 directory,
627 memfilectx: a filecontext that represents files in-memory,
626 memfilectx: a filecontext that represents files in-memory,
628 overlayfilectx: duplicate another filecontext with some fields overridden.
627 overlayfilectx: duplicate another filecontext with some fields overridden.
629 """
628 """
630 @propertycache
629 @propertycache
631 def _filelog(self):
630 def _filelog(self):
632 return self._repo.file(self._path)
631 return self._repo.file(self._path)
633
632
634 @propertycache
633 @propertycache
635 def _changeid(self):
634 def _changeid(self):
636 if r'_changeid' in self.__dict__:
635 if r'_changeid' in self.__dict__:
637 return self._changeid
636 return self._changeid
638 elif r'_changectx' in self.__dict__:
637 elif r'_changectx' in self.__dict__:
639 return self._changectx.rev()
638 return self._changectx.rev()
640 elif r'_descendantrev' in self.__dict__:
639 elif r'_descendantrev' in self.__dict__:
641 # this file context was created from a revision with a known
640 # this file context was created from a revision with a known
642 # descendant, we can (lazily) correct for linkrev aliases
641 # descendant, we can (lazily) correct for linkrev aliases
643 return self._adjustlinkrev(self._descendantrev)
642 return self._adjustlinkrev(self._descendantrev)
644 else:
643 else:
645 return self._filelog.linkrev(self._filerev)
644 return self._filelog.linkrev(self._filerev)
646
645
647 @propertycache
646 @propertycache
648 def _filenode(self):
647 def _filenode(self):
649 if r'_fileid' in self.__dict__:
648 if r'_fileid' in self.__dict__:
650 return self._filelog.lookup(self._fileid)
649 return self._filelog.lookup(self._fileid)
651 else:
650 else:
652 return self._changectx.filenode(self._path)
651 return self._changectx.filenode(self._path)
653
652
654 @propertycache
653 @propertycache
655 def _filerev(self):
654 def _filerev(self):
656 return self._filelog.rev(self._filenode)
655 return self._filelog.rev(self._filenode)
657
656
658 @propertycache
657 @propertycache
659 def _repopath(self):
658 def _repopath(self):
660 return self._path
659 return self._path
661
660
662 def __nonzero__(self):
661 def __nonzero__(self):
663 try:
662 try:
664 self._filenode
663 self._filenode
665 return True
664 return True
666 except error.LookupError:
665 except error.LookupError:
667 # file is missing
666 # file is missing
668 return False
667 return False
669
668
670 __bool__ = __nonzero__
669 __bool__ = __nonzero__
671
670
672 def __bytes__(self):
671 def __bytes__(self):
673 try:
672 try:
674 return "%s@%s" % (self.path(), self._changectx)
673 return "%s@%s" % (self.path(), self._changectx)
675 except error.LookupError:
674 except error.LookupError:
676 return "%s@???" % self.path()
675 return "%s@???" % self.path()
677
676
678 __str__ = encoding.strmethod(__bytes__)
677 __str__ = encoding.strmethod(__bytes__)
679
678
680 def __repr__(self):
679 def __repr__(self):
681 return r"<%s %s>" % (type(self).__name__, str(self))
680 return r"<%s %s>" % (type(self).__name__, str(self))
682
681
683 def __hash__(self):
682 def __hash__(self):
684 try:
683 try:
685 return hash((self._path, self._filenode))
684 return hash((self._path, self._filenode))
686 except AttributeError:
685 except AttributeError:
687 return id(self)
686 return id(self)
688
687
689 def __eq__(self, other):
688 def __eq__(self, other):
690 try:
689 try:
691 return (type(self) == type(other) and self._path == other._path
690 return (type(self) == type(other) and self._path == other._path
692 and self._filenode == other._filenode)
691 and self._filenode == other._filenode)
693 except AttributeError:
692 except AttributeError:
694 return False
693 return False
695
694
696 def __ne__(self, other):
695 def __ne__(self, other):
697 return not (self == other)
696 return not (self == other)
698
697
699 def filerev(self):
698 def filerev(self):
700 return self._filerev
699 return self._filerev
701 def filenode(self):
700 def filenode(self):
702 return self._filenode
701 return self._filenode
703 @propertycache
702 @propertycache
704 def _flags(self):
703 def _flags(self):
705 return self._changectx.flags(self._path)
704 return self._changectx.flags(self._path)
706 def flags(self):
705 def flags(self):
707 return self._flags
706 return self._flags
708 def filelog(self):
707 def filelog(self):
709 return self._filelog
708 return self._filelog
710 def rev(self):
709 def rev(self):
711 return self._changeid
710 return self._changeid
712 def linkrev(self):
711 def linkrev(self):
713 return self._filelog.linkrev(self._filerev)
712 return self._filelog.linkrev(self._filerev)
714 def node(self):
713 def node(self):
715 return self._changectx.node()
714 return self._changectx.node()
716 def hex(self):
715 def hex(self):
717 return self._changectx.hex()
716 return self._changectx.hex()
718 def user(self):
717 def user(self):
719 return self._changectx.user()
718 return self._changectx.user()
720 def date(self):
719 def date(self):
721 return self._changectx.date()
720 return self._changectx.date()
722 def files(self):
721 def files(self):
723 return self._changectx.files()
722 return self._changectx.files()
724 def description(self):
723 def description(self):
725 return self._changectx.description()
724 return self._changectx.description()
726 def branch(self):
725 def branch(self):
727 return self._changectx.branch()
726 return self._changectx.branch()
728 def extra(self):
727 def extra(self):
729 return self._changectx.extra()
728 return self._changectx.extra()
730 def phase(self):
729 def phase(self):
731 return self._changectx.phase()
730 return self._changectx.phase()
732 def phasestr(self):
731 def phasestr(self):
733 return self._changectx.phasestr()
732 return self._changectx.phasestr()
734 def obsolete(self):
733 def obsolete(self):
735 return self._changectx.obsolete()
734 return self._changectx.obsolete()
736 def instabilities(self):
735 def instabilities(self):
737 return self._changectx.instabilities()
736 return self._changectx.instabilities()
738 def manifest(self):
737 def manifest(self):
739 return self._changectx.manifest()
738 return self._changectx.manifest()
740 def changectx(self):
739 def changectx(self):
741 return self._changectx
740 return self._changectx
742 def renamed(self):
741 def renamed(self):
743 return self._copied
742 return self._copied
744 def repo(self):
743 def repo(self):
745 return self._repo
744 return self._repo
746 def size(self):
745 def size(self):
747 return len(self.data())
746 return len(self.data())
748
747
749 def path(self):
748 def path(self):
750 return self._path
749 return self._path
751
750
752 def isbinary(self):
751 def isbinary(self):
753 try:
752 try:
754 return stringutil.binary(self.data())
753 return stringutil.binary(self.data())
755 except IOError:
754 except IOError:
756 return False
755 return False
757 def isexec(self):
756 def isexec(self):
758 return 'x' in self.flags()
757 return 'x' in self.flags()
759 def islink(self):
758 def islink(self):
760 return 'l' in self.flags()
759 return 'l' in self.flags()
761
760
762 def isabsent(self):
761 def isabsent(self):
763 """whether this filectx represents a file not in self._changectx
762 """whether this filectx represents a file not in self._changectx
764
763
765 This is mainly for merge code to detect change/delete conflicts. This is
764 This is mainly for merge code to detect change/delete conflicts. This is
766 expected to be True for all subclasses of basectx."""
765 expected to be True for all subclasses of basectx."""
767 return False
766 return False
768
767
769 _customcmp = False
768 _customcmp = False
770 def cmp(self, fctx):
769 def cmp(self, fctx):
771 """compare with other file context
770 """compare with other file context
772
771
773 returns True if different than fctx.
772 returns True if different than fctx.
774 """
773 """
775 if fctx._customcmp:
774 if fctx._customcmp:
776 return fctx.cmp(self)
775 return fctx.cmp(self)
777
776
778 if (fctx._filenode is None
777 if (fctx._filenode is None
779 and (self._repo._encodefilterpats
778 and (self._repo._encodefilterpats
780 # if file data starts with '\1\n', empty metadata block is
779 # if file data starts with '\1\n', empty metadata block is
781 # prepended, which adds 4 bytes to filelog.size().
780 # prepended, which adds 4 bytes to filelog.size().
782 or self.size() - 4 == fctx.size())
781 or self.size() - 4 == fctx.size())
783 or self.size() == fctx.size()):
782 or self.size() == fctx.size()):
784 return self._filelog.cmp(self._filenode, fctx.data())
783 return self._filelog.cmp(self._filenode, fctx.data())
785
784
786 return True
785 return True
787
786
788 def _adjustlinkrev(self, srcrev, inclusive=False):
787 def _adjustlinkrev(self, srcrev, inclusive=False):
789 """return the first ancestor of <srcrev> introducing <fnode>
788 """return the first ancestor of <srcrev> introducing <fnode>
790
789
791 If the linkrev of the file revision does not point to an ancestor of
790 If the linkrev of the file revision does not point to an ancestor of
792 srcrev, we'll walk down the ancestors until we find one introducing
791 srcrev, we'll walk down the ancestors until we find one introducing
793 this file revision.
792 this file revision.
794
793
795 :srcrev: the changeset revision we search ancestors from
794 :srcrev: the changeset revision we search ancestors from
796 :inclusive: if true, the src revision will also be checked
795 :inclusive: if true, the src revision will also be checked
797 """
796 """
798 repo = self._repo
797 repo = self._repo
799 cl = repo.unfiltered().changelog
798 cl = repo.unfiltered().changelog
800 mfl = repo.manifestlog
799 mfl = repo.manifestlog
801 # fetch the linkrev
800 # fetch the linkrev
802 lkr = self.linkrev()
801 lkr = self.linkrev()
803 # hack to reuse ancestor computation when searching for renames
802 # hack to reuse ancestor computation when searching for renames
804 memberanc = getattr(self, '_ancestrycontext', None)
803 memberanc = getattr(self, '_ancestrycontext', None)
805 iteranc = None
804 iteranc = None
806 if srcrev is None:
805 if srcrev is None:
807 # wctx case, used by workingfilectx during mergecopy
806 # wctx case, used by workingfilectx during mergecopy
808 revs = [p.rev() for p in self._repo[None].parents()]
807 revs = [p.rev() for p in self._repo[None].parents()]
809 inclusive = True # we skipped the real (revless) source
808 inclusive = True # we skipped the real (revless) source
810 else:
809 else:
811 revs = [srcrev]
810 revs = [srcrev]
812 if memberanc is None:
811 if memberanc is None:
813 memberanc = iteranc = cl.ancestors(revs, lkr,
812 memberanc = iteranc = cl.ancestors(revs, lkr,
814 inclusive=inclusive)
813 inclusive=inclusive)
815 # check if this linkrev is an ancestor of srcrev
814 # check if this linkrev is an ancestor of srcrev
816 if lkr not in memberanc:
815 if lkr not in memberanc:
817 if iteranc is None:
816 if iteranc is None:
818 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
817 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
819 fnode = self._filenode
818 fnode = self._filenode
820 path = self._path
819 path = self._path
821 for a in iteranc:
820 for a in iteranc:
822 ac = cl.read(a) # get changeset data (we avoid object creation)
821 ac = cl.read(a) # get changeset data (we avoid object creation)
823 if path in ac[3]: # checking the 'files' field.
822 if path in ac[3]: # checking the 'files' field.
824 # The file has been touched, check if the content is
823 # The file has been touched, check if the content is
825 # similar to the one we search for.
824 # similar to the one we search for.
826 if fnode == mfl[ac[0]].readfast().get(path):
825 if fnode == mfl[ac[0]].readfast().get(path):
827 return a
826 return a
828 # In theory, we should never get out of that loop without a result.
827 # In theory, we should never get out of that loop without a result.
829 # But if manifest uses a buggy file revision (not children of the
828 # But if manifest uses a buggy file revision (not children of the
830 # one it replaces) we could. Such a buggy situation will likely
829 # one it replaces) we could. Such a buggy situation will likely
831 # result is crash somewhere else at to some point.
830 # result is crash somewhere else at to some point.
832 return lkr
831 return lkr
833
832
834 def introrev(self):
833 def introrev(self):
835 """return the rev of the changeset which introduced this file revision
834 """return the rev of the changeset which introduced this file revision
836
835
837 This method is different from linkrev because it take into account the
836 This method is different from linkrev because it take into account the
838 changeset the filectx was created from. It ensures the returned
837 changeset the filectx was created from. It ensures the returned
839 revision is one of its ancestors. This prevents bugs from
838 revision is one of its ancestors. This prevents bugs from
840 'linkrev-shadowing' when a file revision is used by multiple
839 'linkrev-shadowing' when a file revision is used by multiple
841 changesets.
840 changesets.
842 """
841 """
843 lkr = self.linkrev()
842 lkr = self.linkrev()
844 attrs = vars(self)
843 attrs = vars(self)
845 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
844 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
846 if noctx or self.rev() == lkr:
845 if noctx or self.rev() == lkr:
847 return self.linkrev()
846 return self.linkrev()
848 return self._adjustlinkrev(self.rev(), inclusive=True)
847 return self._adjustlinkrev(self.rev(), inclusive=True)
849
848
850 def introfilectx(self):
849 def introfilectx(self):
851 """Return filectx having identical contents, but pointing to the
850 """Return filectx having identical contents, but pointing to the
852 changeset revision where this filectx was introduced"""
851 changeset revision where this filectx was introduced"""
853 introrev = self.introrev()
852 introrev = self.introrev()
854 if self.rev() == introrev:
853 if self.rev() == introrev:
855 return self
854 return self
856 return self.filectx(self.filenode(), changeid=introrev)
855 return self.filectx(self.filenode(), changeid=introrev)
857
856
858 def _parentfilectx(self, path, fileid, filelog):
857 def _parentfilectx(self, path, fileid, filelog):
859 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
858 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
860 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
859 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
861 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
860 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
862 # If self is associated with a changeset (probably explicitly
861 # If self is associated with a changeset (probably explicitly
863 # fed), ensure the created filectx is associated with a
862 # fed), ensure the created filectx is associated with a
864 # changeset that is an ancestor of self.changectx.
863 # changeset that is an ancestor of self.changectx.
865 # This lets us later use _adjustlinkrev to get a correct link.
864 # This lets us later use _adjustlinkrev to get a correct link.
866 fctx._descendantrev = self.rev()
865 fctx._descendantrev = self.rev()
867 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
866 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
868 elif r'_descendantrev' in vars(self):
867 elif r'_descendantrev' in vars(self):
869 # Otherwise propagate _descendantrev if we have one associated.
868 # Otherwise propagate _descendantrev if we have one associated.
870 fctx._descendantrev = self._descendantrev
869 fctx._descendantrev = self._descendantrev
871 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
870 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
872 return fctx
871 return fctx
873
872
874 def parents(self):
873 def parents(self):
875 _path = self._path
874 _path = self._path
876 fl = self._filelog
875 fl = self._filelog
877 parents = self._filelog.parents(self._filenode)
876 parents = self._filelog.parents(self._filenode)
878 pl = [(_path, node, fl) for node in parents if node != nullid]
877 pl = [(_path, node, fl) for node in parents if node != nullid]
879
878
880 r = fl.renamed(self._filenode)
879 r = fl.renamed(self._filenode)
881 if r:
880 if r:
882 # - In the simple rename case, both parent are nullid, pl is empty.
881 # - In the simple rename case, both parent are nullid, pl is empty.
883 # - In case of merge, only one of the parent is null id and should
882 # - In case of merge, only one of the parent is null id and should
884 # be replaced with the rename information. This parent is -always-
883 # be replaced with the rename information. This parent is -always-
885 # the first one.
884 # the first one.
886 #
885 #
887 # As null id have always been filtered out in the previous list
886 # As null id have always been filtered out in the previous list
888 # comprehension, inserting to 0 will always result in "replacing
887 # comprehension, inserting to 0 will always result in "replacing
889 # first nullid parent with rename information.
888 # first nullid parent with rename information.
890 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
889 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
891
890
892 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
891 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
893
892
894 def p1(self):
893 def p1(self):
895 return self.parents()[0]
894 return self.parents()[0]
896
895
897 def p2(self):
896 def p2(self):
898 p = self.parents()
897 p = self.parents()
899 if len(p) == 2:
898 if len(p) == 2:
900 return p[1]
899 return p[1]
901 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
900 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
902
901
903 def annotate(self, follow=False, skiprevs=None, diffopts=None):
902 def annotate(self, follow=False, skiprevs=None, diffopts=None):
904 """Returns a list of annotateline objects for each line in the file
903 """Returns a list of annotateline objects for each line in the file
905
904
906 - line.fctx is the filectx of the node where that line was last changed
905 - line.fctx is the filectx of the node where that line was last changed
907 - line.lineno is the line number at the first appearance in the managed
906 - line.lineno is the line number at the first appearance in the managed
908 file
907 file
909 - line.text is the data on that line (including newline character)
908 - line.text is the data on that line (including newline character)
910 """
909 """
911 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
910 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
912
911
913 def parents(f):
912 def parents(f):
914 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
913 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
915 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
914 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
916 # from the topmost introrev (= srcrev) down to p.linkrev() if it
915 # from the topmost introrev (= srcrev) down to p.linkrev() if it
917 # isn't an ancestor of the srcrev.
916 # isn't an ancestor of the srcrev.
918 f._changeid
917 f._changeid
919 pl = f.parents()
918 pl = f.parents()
920
919
921 # Don't return renamed parents if we aren't following.
920 # Don't return renamed parents if we aren't following.
922 if not follow:
921 if not follow:
923 pl = [p for p in pl if p.path() == f.path()]
922 pl = [p for p in pl if p.path() == f.path()]
924
923
925 # renamed filectx won't have a filelog yet, so set it
924 # renamed filectx won't have a filelog yet, so set it
926 # from the cache to save time
925 # from the cache to save time
927 for p in pl:
926 for p in pl:
928 if not r'_filelog' in p.__dict__:
927 if not r'_filelog' in p.__dict__:
929 p._filelog = getlog(p.path())
928 p._filelog = getlog(p.path())
930
929
931 return pl
930 return pl
932
931
933 # use linkrev to find the first changeset where self appeared
932 # use linkrev to find the first changeset where self appeared
934 base = self.introfilectx()
933 base = self.introfilectx()
935 if getattr(base, '_ancestrycontext', None) is None:
934 if getattr(base, '_ancestrycontext', None) is None:
936 cl = self._repo.changelog
935 cl = self._repo.changelog
937 if base.rev() is None:
936 if base.rev() is None:
938 # wctx is not inclusive, but works because _ancestrycontext
937 # wctx is not inclusive, but works because _ancestrycontext
939 # is used to test filelog revisions
938 # is used to test filelog revisions
940 ac = cl.ancestors([p.rev() for p in base.parents()],
939 ac = cl.ancestors([p.rev() for p in base.parents()],
941 inclusive=True)
940 inclusive=True)
942 else:
941 else:
943 ac = cl.ancestors([base.rev()], inclusive=True)
942 ac = cl.ancestors([base.rev()], inclusive=True)
944 base._ancestrycontext = ac
943 base._ancestrycontext = ac
945
944
946 return dagop.annotate(base, parents, skiprevs=skiprevs,
945 return dagop.annotate(base, parents, skiprevs=skiprevs,
947 diffopts=diffopts)
946 diffopts=diffopts)
948
947
949 def ancestors(self, followfirst=False):
948 def ancestors(self, followfirst=False):
950 visit = {}
949 visit = {}
951 c = self
950 c = self
952 if followfirst:
951 if followfirst:
953 cut = 1
952 cut = 1
954 else:
953 else:
955 cut = None
954 cut = None
956
955
957 while True:
956 while True:
958 for parent in c.parents()[:cut]:
957 for parent in c.parents()[:cut]:
959 visit[(parent.linkrev(), parent.filenode())] = parent
958 visit[(parent.linkrev(), parent.filenode())] = parent
960 if not visit:
959 if not visit:
961 break
960 break
962 c = visit.pop(max(visit))
961 c = visit.pop(max(visit))
963 yield c
962 yield c
964
963
965 def decodeddata(self):
964 def decodeddata(self):
966 """Returns `data()` after running repository decoding filters.
965 """Returns `data()` after running repository decoding filters.
967
966
968 This is often equivalent to how the data would be expressed on disk.
967 This is often equivalent to how the data would be expressed on disk.
969 """
968 """
970 return self._repo.wwritedata(self.path(), self.data())
969 return self._repo.wwritedata(self.path(), self.data())
971
970
972 class filectx(basefilectx):
971 class filectx(basefilectx):
973 """A filecontext object makes access to data related to a particular
972 """A filecontext object makes access to data related to a particular
974 filerevision convenient."""
973 filerevision convenient."""
975 def __init__(self, repo, path, changeid=None, fileid=None,
974 def __init__(self, repo, path, changeid=None, fileid=None,
976 filelog=None, changectx=None):
975 filelog=None, changectx=None):
977 """changeid can be a changeset revision, node, or tag.
976 """changeid can be a changeset revision, node, or tag.
978 fileid can be a file revision or node."""
977 fileid can be a file revision or node."""
979 self._repo = repo
978 self._repo = repo
980 self._path = path
979 self._path = path
981
980
982 assert (changeid is not None
981 assert (changeid is not None
983 or fileid is not None
982 or fileid is not None
984 or changectx is not None), \
983 or changectx is not None), \
985 ("bad args: changeid=%r, fileid=%r, changectx=%r"
984 ("bad args: changeid=%r, fileid=%r, changectx=%r"
986 % (changeid, fileid, changectx))
985 % (changeid, fileid, changectx))
987
986
988 if filelog is not None:
987 if filelog is not None:
989 self._filelog = filelog
988 self._filelog = filelog
990
989
991 if changeid is not None:
990 if changeid is not None:
992 self._changeid = changeid
991 self._changeid = changeid
993 if changectx is not None:
992 if changectx is not None:
994 self._changectx = changectx
993 self._changectx = changectx
995 if fileid is not None:
994 if fileid is not None:
996 self._fileid = fileid
995 self._fileid = fileid
997
996
    @propertycache
    def _changectx(self):
        """The changectx this file revision belongs to (computed lazily)."""
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the repository.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)
1020
1019
1021 def filectx(self, fileid, changeid=None):
1020 def filectx(self, fileid, changeid=None):
1022 '''opens an arbitrary revision of the file without
1021 '''opens an arbitrary revision of the file without
1023 opening a new filelog'''
1022 opening a new filelog'''
1024 return filectx(self._repo, self._path, fileid=fileid,
1023 return filectx(self._repo, self._path, fileid=fileid,
1025 filelog=self._filelog, changeid=changeid)
1024 filelog=self._filelog, changeid=changeid)
1026
1025
1027 def rawdata(self):
1026 def rawdata(self):
1028 return self._filelog.revision(self._filenode, raw=True)
1027 return self._filelog.revision(self._filenode, raw=True)
1029
1028
1030 def rawflags(self):
1029 def rawflags(self):
1031 """low-level revlog flags"""
1030 """low-level revlog flags"""
1032 return self._filelog.flags(self._filerev)
1031 return self._filelog.flags(self._filerev)
1033
1032
    def data(self):
        """Return the file content at this revision.

        If the node was censored and censor.policy is 'ignore', an empty
        string is returned instead; otherwise the read aborts with a hint.
        """
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))
1042
1041
1043 def size(self):
1042 def size(self):
1044 return self._filelog.size(self._filerev)
1043 return self._filelog.size(self._filerev)
1045
1044
    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    # same file revision already present in a parent: the
                    # rename did not actually happen in this changeset
                    return None
            except error.LookupError:
                # file absent from this parent; keep checking the other one
                pass
        return renamed
1071
1070
1072 def children(self):
1071 def children(self):
1073 # hard for renames
1072 # hard for renames
1074 c = self._filelog.children(self._filenode)
1073 c = self._filelog.children(self._filenode)
1075 return [filectx(self._repo, self._path, fileid=x,
1074 return [filectx(self._repo, self._path, fileid=x,
1076 filelog=self._filelog) for x in c]
1075 filelog=self._filelog) for x in c]
1077
1076
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(committablectx, self).__init__(repo)
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        # a committable context always represents a real (non-null) state
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # lazily computed when 'changes' was not passed to __init__
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        # devel.default-date lets tests pin a deterministic date
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = dateutil.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # an uncommitted context inherits the bookmarks of its parents
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        ds = self._repo.dirstate
        # 'r' entries are scheduled for removal and therefore do not match
        return sorted(f for f in ds.matches(match) if ds[f] != 'r')

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
                [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1279
1278
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
       or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        # all state handling is delegated to committablectx
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1292
1291
1293 def __iter__(self):
1292 def __iter__(self):
1294 d = self._repo.dirstate
1293 d = self._repo.dirstate
1295 for f in d:
1294 for f in d:
1296 if d[f] != 'r':
1295 if d[f] != 'r':
1297 yield f
1296 yield f
1298
1297
1299 def __contains__(self, key):
1298 def __contains__(self, key):
1300 return self._repo.dirstate[key] not in "?r"
1299 return self._repo.dirstate[key] not in "?r"
1301
1300
    def hex(self):
        # the working directory has a fixed, special node id
        return hex(wdirid)
1304
1303
1305 @propertycache
1304 @propertycache
1306 def _parents(self):
1305 def _parents(self):
1307 p = self._repo.dirstate.parents()
1306 p = self._repo.dirstate.parents()
1308 if p[1] == nullid:
1307 if p[1] == nullid:
1309 p = p[:-1]
1308 p = p[:-1]
1310 return [changectx(self._repo, x) for x in p]
1309 return [changectx(self._repo, x) for x in p]
1311
1310
    def _fileinfo(self, path):
        # populate __dict__['_manifest'] as workingctx has no _manifestdelta
        self._manifest
        return super(workingctx, self)._fileinfo(path)
1316
1315
1317 def filectx(self, path, filelog=None):
1316 def filectx(self, path, filelog=None):
1318 """get a file context from the working directory"""
1317 """get a file context from the working directory"""
1319 return workingfilectx(self._repo, path, workingctx=self,
1318 return workingfilectx(self._repo, path, workingctx=self,
1320 filelog=filelog)
1319 filelog=filelog)
1321
1320
    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir; note that the result is truthy/falsy,
        # not necessarily a bool (e.g. a non-empty modified list)
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))
1333
1332
1334 def add(self, list, prefix=""):
1333 def add(self, list, prefix=""):
1335 with self._repo.wlock():
1334 with self._repo.wlock():
1336 ui, ds = self._repo.ui, self._repo.dirstate
1335 ui, ds = self._repo.ui, self._repo.dirstate
1337 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1336 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1338 rejected = []
1337 rejected = []
1339 lstat = self._repo.wvfs.lstat
1338 lstat = self._repo.wvfs.lstat
1340 for f in list:
1339 for f in list:
1341 # ds.pathto() returns an absolute file when this is invoked from
1340 # ds.pathto() returns an absolute file when this is invoked from
1342 # the keyword extension. That gets flagged as non-portable on
1341 # the keyword extension. That gets flagged as non-portable on
1343 # Windows, since it contains the drive letter and colon.
1342 # Windows, since it contains the drive letter and colon.
1344 scmutil.checkportable(ui, os.path.join(prefix, f))
1343 scmutil.checkportable(ui, os.path.join(prefix, f))
1345 try:
1344 try:
1346 st = lstat(f)
1345 st = lstat(f)
1347 except OSError:
1346 except OSError:
1348 ui.warn(_("%s does not exist!\n") % uipath(f))
1347 ui.warn(_("%s does not exist!\n") % uipath(f))
1349 rejected.append(f)
1348 rejected.append(f)
1350 continue
1349 continue
1351 limit = ui.configbytes('ui', 'large-file-limit')
1350 limit = ui.configbytes('ui', 'large-file-limit')
1352 if limit != 0 and st.st_size > limit:
1351 if limit != 0 and st.st_size > limit:
1353 ui.warn(_("%s: up to %d MB of RAM may be required "
1352 ui.warn(_("%s: up to %d MB of RAM may be required "
1354 "to manage this file\n"
1353 "to manage this file\n"
1355 "(use 'hg revert %s' to cancel the "
1354 "(use 'hg revert %s' to cancel the "
1356 "pending addition)\n")
1355 "pending addition)\n")
1357 % (f, 3 * st.st_size // 1000000, uipath(f)))
1356 % (f, 3 * st.st_size // 1000000, uipath(f)))
1358 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1357 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1359 ui.warn(_("%s not added: only files and symlinks "
1358 ui.warn(_("%s not added: only files and symlinks "
1360 "supported currently\n") % uipath(f))
1359 "supported currently\n") % uipath(f))
1361 rejected.append(f)
1360 rejected.append(f)
1362 elif ds[f] in 'amn':
1361 elif ds[f] in 'amn':
1363 ui.warn(_("%s already tracked!\n") % uipath(f))
1362 ui.warn(_("%s already tracked!\n") % uipath(f))
1364 elif ds[f] == 'r':
1363 elif ds[f] == 'r':
1365 ds.normallookup(f)
1364 ds.normallookup(f)
1366 else:
1365 else:
1367 ds.add(f)
1366 ds.add(f)
1368 return rejected
1367 return rejected
1369
1368
1370 def forget(self, files, prefix=""):
1369 def forget(self, files, prefix=""):
1371 with self._repo.wlock():
1370 with self._repo.wlock():
1372 ds = self._repo.dirstate
1371 ds = self._repo.dirstate
1373 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1372 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1374 rejected = []
1373 rejected = []
1375 for f in files:
1374 for f in files:
1376 if f not in self._repo.dirstate:
1375 if f not in self._repo.dirstate:
1377 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1376 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1378 rejected.append(f)
1377 rejected.append(f)
1379 elif self._repo.dirstate[f] != 'a':
1378 elif self._repo.dirstate[f] != 'a':
1380 self._repo.dirstate.remove(f)
1379 self._repo.dirstate.remove(f)
1381 else:
1380 else:
1382 self._repo.dirstate.drop(f)
1381 self._repo.dirstate.drop(f)
1383 return rejected
1382 return rejected
1384
1383
1385 def undelete(self, list):
1384 def undelete(self, list):
1386 pctxs = self.parents()
1385 pctxs = self.parents()
1387 with self._repo.wlock():
1386 with self._repo.wlock():
1388 ds = self._repo.dirstate
1387 ds = self._repo.dirstate
1389 for f in list:
1388 for f in list:
1390 if self._repo.dirstate[f] != 'r':
1389 if self._repo.dirstate[f] != 'r':
1391 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1390 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1392 else:
1391 else:
1393 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1392 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1394 t = fctx.data()
1393 t = fctx.data()
1395 self._repo.wwrite(f, t, fctx.flags())
1394 self._repo.wwrite(f, t, fctx.flags())
1396 self._repo.dirstate.normal(f)
1395 self._repo.dirstate.normal(f)
1397
1396
    def copy(self, source, dest):
        """Mark *dest* as a copy of *source* in the dirstate.

        *dest* must exist in the working directory and be a regular file or
        symlink; otherwise a warning is emitted and nothing is recorded.
        """
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n")
                               % self._repo.dirstate.pathto(dest))
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n")
                               % self._repo.dirstate.pathto(dest))
        else:
            with self._repo.wlock():
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    # dest was marked removed; make it tracked again
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)
1418
1417
1419 def match(self, pats=None, include=None, exclude=None, default='glob',
1418 def match(self, pats=None, include=None, exclude=None, default='glob',
1420 listsubrepos=False, badfn=None):
1419 listsubrepos=False, badfn=None):
1421 r = self._repo
1420 r = self._repo
1422
1421
1423 # Only a case insensitive filesystem needs magic to translate user input
1422 # Only a case insensitive filesystem needs magic to translate user input
1424 # to actual case in the filesystem.
1423 # to actual case in the filesystem.
1425 icasefs = not util.fscasesensitive(r.root)
1424 icasefs = not util.fscasesensitive(r.root)
1426 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1425 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1427 default, auditor=r.auditor, ctx=self,
1426 default, auditor=r.auditor, ctx=self,
1428 listsubrepos=listsubrepos, badfn=badfn,
1427 listsubrepos=listsubrepos, badfn=badfn,
1429 icasefs=icasefs)
1428 icasefs=icasefs)
1430
1429
1431 def _filtersuspectsymlink(self, files):
1430 def _filtersuspectsymlink(self, files):
1432 if not files or self._repo.dirstate._checklink:
1431 if not files or self._repo.dirstate._checklink:
1433 return files
1432 return files
1434
1433
1435 # Symlink placeholders may get non-symlink-like contents
1434 # Symlink placeholders may get non-symlink-like contents
1436 # via user error or dereferencing by NFS or Samba servers,
1435 # via user error or dereferencing by NFS or Samba servers,
1437 # so we filter out any placeholders that don't look like a
1436 # so we filter out any placeholders that don't look like a
1438 # symlink
1437 # symlink
1439 sane = []
1438 sane = []
1440 for f in files:
1439 for f in files:
1441 if self.flags(f) == 'l':
1440 if self.flags(f) == 'l':
1442 d = self[f].data()
1441 d = self[f].data()
1443 if (d == '' or len(d) >= 1024 or '\n' in d
1442 if (d == '' or len(d) >= 1024 or '\n' in d
1444 or stringutil.binary(d)):
1443 or stringutil.binary(d)):
1445 self._repo.ui.debug('ignoring suspect symlink placeholder'
1444 self._repo.ui.debug('ignoring suspect symlink placeholder'
1446 ' "%s"\n' % f)
1445 ' "%s"\n' % f)
1447 continue
1446 continue
1448 sane.append(f)
1447 sane.append(f)
1449 return sane
1448 return sane
1450
1449
    def _checklookup(self, files):
        """Re-check files the dirstate could not classify by stat alone.

        Returns a (modified, deleted, fixup) triple of file lists; 'fixup'
        holds files that turned out to be clean.
        """
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (f not in pctx or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # A file become inaccessible in between? Mark it as deleted,
                # matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup
1480
1479
    def _poststatusfixup(self, status, fixup):
        """update dirstate for files that are actually clean"""
        poststatus = self._repo.postdsstatus()
        if fixup or poststatus:
            try:
                oldid = self._repo.dirstate.identity()

                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    if self._repo.dirstate.identity() == oldid:
                        if fixup:
                            normal = self._repo.dirstate.normal
                            for f in fixup:
                                normal(f)
                            # write changes out explicitly, because nesting
                            # wlock at runtime may prevent 'wlock.release()'
                            # after this block from doing so for subsequent
                            # changing files
                            tr = self._repo.currenttransaction()
                            self._repo.dirstate.write(tr)

                        if poststatus:
                            for ps in poststatus:
                                ps(self, status)
                    else:
                        # in this case, writing changes out breaks
                        # consistency, because .hg/dirstate was
                        # already changed simultaneously after last
                        # caching (see also issue5584 for detail)
                        self._repo.ui.debug('skip updating dirstate: '
                                            'identity mismatch\n')
            except error.LockError:
                # best-effort: dirstate update is optional
                pass
            finally:
                # Even if the wlock couldn't be grabbed, clear out the list.
                self._repo.clearpostdsstatus()
1520
1519
    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
                                            clean=clean, unknown=unknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

        if fixup and clean:
            s.clean.extend(fixup)

        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s
1551
1550
@propertycache
def _manifest(self):
    """generate a manifest corresponding to the values in self._status

    This reuse the file nodeid from parent, but we use special node
    identifiers for added and modified files. This is used by manifests
    merge to see that files are different and by update logic to avoid
    deleting newly added files.
    """
    # Computed lazily and cached; accessing self._status may trigger a
    # full dirstate status run the first time.
    return self._buildstatusmanifest(self._status)
1562
1561
def _buildstatusmanifest(self, status):
    """Return a copy of p1's manifest updated with *status* results.

    Added files are entered under ``addednodeid`` and modified files
    under ``modifiednodeid`` so that manifest comparisons see them as
    different from any committed revision; deleted and removed files
    are dropped from the manifest entirely.
    """
    man = self.parents()[0].manifest().copy()
    flagfunc = self._flagfunc

    for nodeid, paths in [(addednodeid, status.added),
                          (modifiednodeid, status.modified)]:
        for path in paths:
            man[path] = nodeid
            try:
                man.setflag(path, flagfunc(path))
            except OSError:
                # the file vanished from disk; keep the default flags
                pass

    for path in status.deleted + status.removed:
        if path in man:
            del man[path]

    return man
1584
1583
def _buildstatus(self, other, s, match, listignored, listclean,
                 listunknown):
    """build a status with respect to another context

    This includes logic for maintaining the fast path of status when
    comparing the working directory against its parent, which is to skip
    building a new manifest if self (working directory) is not comparing
    against its parent (repo['.']).
    """
    # The incoming 's' is deliberately replaced: the working directory's
    # status always comes from the dirstate first.
    s = self._dirstatestatus(match, listignored, listclean, listunknown)
    # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
    # might have accidentally ended up with the entire contents of the file
    # they are supposed to be linking to.
    s.modified[:] = self._filtersuspectsymlink(s.modified)
    if other != self._repo['.']:
        # Not comparing against the dirstate parent: fall back to the
        # generic manifest-based comparison in the base class.
        s = super(workingctx, self)._buildstatus(other, s, match,
                                                 listignored, listclean,
                                                 listunknown)
    return s
1604
1603
def _matchstatus(self, other, match):
    """override the match method with a filter for directory patterns

    We use inheritance to customize the match.bad method only in cases of
    workingctx since it belongs only to the working directory when
    comparing against the parent changeset.

    If we aren't comparing against the working directory's parent, then we
    just use the default match object sent to us.
    """
    if other == self._repo['.']:
        # comparing against the dirstate parent: default behavior is fine
        return match

    def bad(f, msg):
        # 'f' may be a directory pattern from 'match.files()',
        # so 'f not in ctx1' is not enough
        if f in other or other.hasdir(f):
            return
        repo = self._repo
        repo.ui.warn('%s: %s\n' % (repo.dirstate.pathto(f), msg))

    match.bad = bad
    return match
1624
1623
def markcommitted(self, node):
    """Perform post-commit cleanup after this context was committed as *node*.

    Delegates to the base class first, then lets the sparse machinery
    refresh its state for the new commit.
    """
    super(workingctx, self).markcommitted(node)

    sparse.aftercommit(self._repo, node)
1629
1628
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""

    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None
        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context is always truthy
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        path = self._path
        filelog = self._filelog
        parentctxs = self._changectx._parents

        def nodefor(ctx):
            # filenode of 'path' in ctx, or nullid when absent
            return ctx._manifest.get(path, nullid)

        renamed = self.renamed()
        if renamed:
            # renamed is (source path, source filenode); filelog unknown
            entries = [renamed + (None,)]
        else:
            entries = [(path, nodefor(parentctxs[0]), filelog)]
        entries.extend((path, nodefor(pctx), filelog)
                       for pctx in parentctxs[1:])

        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in entries if n != nullid]

    def children(self):
        # uncommitted contexts have no committed children
        return []
1676
1675
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # lazily bind to a fresh workingctx when one wasn't supplied
        return workingctx(self._repo)

    def data(self):
        # raw file content from the working directory
        return self._repo.wread(self._path)
    def renamed(self):
        """Return (source path, source filenode) if this file was copied,
        else None."""
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        # lstat so a symlink reports its own size, not its target's
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        """Return (mtime, tzoffset); falls back to the changectx date when
        the file is missing from disk."""
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def exists(self):
        # follows symlinks (a broken symlink does not "exist")
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        # does not follow symlinks
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        # sanity-check the path (no traversal outside the working dir, etc.)
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
                                   rmdir=rmdir)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose,
                          **kwargs)

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        # only track the copy when the file is in a tracked state
        # (normal/merged/added)
        if self._repo.dirstate[self._path] in "nma":
            self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if wvfs.isdir(f) and not wvfs.islink(f):
            # a directory occupies the target path: remove it wholesale
            wvfs.rmtree(f, forcibly=True)
        if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
            # a file may also occupy one of the ancestor directories of f;
            # remove the deepest such conflicting file, if any
            for p in reversed(list(util.finddirs(f))):
                if wvfs.isfileorlink(p):
                    wvfs.unlink(p)
                    break

    def setflags(self, l, x):
        # set the symlink ('l') and executable ('x') flags on disk
        self._repo.wvfs.setflags(self._path, l, x)
1758
1757
class overlayworkingctx(committablectx):
    """Wraps another mutable context with a write-back cache that can be
    converted into a commit context.

    self._cache[path] maps to a dict with keys: {
      'exists': bool?
      'date': date?
      'data': str?
      'flags': str?
      'copied': str? (path or None)
    }
    If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
    is `False`, the file was deleted.
    """

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        """Set (or replace) the context this overlay writes on top of."""
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, '_manifest')

    def data(self, path):
        """Return file content for ``path``, preferring cached writes."""
        if self.isdirty(path):
            if self._cache[path]['exists']:
                if self._cache[path]['data']:
                    return self._cache[path]['data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        # p1's manifest plus the overlay's adds/modifications/removals,
        # using the sentinel nodeids so comparisons see the changes
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        # flags come from the write-back cache, never from disk
        def f(path):
            return self._cache[path]['flags']
        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        # dirty, existing, and present in the wrapped context
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                self._existsinparent(f)]

    def added(self):
        # dirty, existing, and absent from the wrapped context
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                not self._existsinparent(f)]

    def removed(self):
        # dirty, deleted, and present in the wrapped context
        return [f for f in self._cache.keys() if
                not self._cache[f]['exists'] and self._existsinparent(f)]

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path]['date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        """Record that ``path`` is a copy of ``origin``; ``path`` must
        already be dirty in the cache."""
        if self.isdirty(path):
            self._cache[path]['copied'] = origin
        else:
            raise error.ProgrammingError('markcopied() called on clean context')

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path]['copied']
        else:
            raise error.ProgrammingError('copydata() called on clean context')

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return self._cache[path]['flags']
            else:
                # interpolate the looked-up path; this class is a changectx
                # and has no self._path attribute (previously referenced
                # here, which raised AttributeError instead of this error)
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].flags()

    def _existsinparent(self, path):
        try:
            # ``commitctx` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """
        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if 'l' in self.p1()[component].flags():
                raise error.Abort("error: %s conflicts with symlink %s "
                                  "in %s." % (path, component,
                                              self.p1().rev()))
            else:
                raise error.Abort("error: '%s' conflicts with file '%s' in "
                                  "%s." % (path, component,
                                           self.p1().rev()))

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split('/')
        # pycompat.xrange instead of the py2-only xrange builtin
        for i in pycompat.xrange(len(components)):
            component = "/".join(components[0:i])
            if component in self.p1():
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't any paths matching `path/*`).
        match = matchmod.match('/', '', [path + '/'], default=b'relpath')
        matches = self.p1().manifest().matches(match)
        if len(matches) > 0:
            # list() so subscripting works whether keys() returns a list
            # (py2) or a view (py3)
            if len(matches) == 1 and list(matches.keys())[0] == path:
                return
            raise error.Abort("error: file '%s' cannot be written because "
                              " '%s/' is a folder in %s (containing %d "
                              "entries: %s)"
                              % (path, path, self.p1(), len(matches),
                                 ', '.join(matches.keys())))

    def write(self, path, data, flags='', **kwargs):
        if data is None:
            raise error.ProgrammingError("data must be non-None")
        self._auditconflicts(path)
        self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
                        flags=flags)

    def setflags(self, path, l, x):
        self._markdirty(path, exists=True, date=dateutil.makedate(),
                        flags=(l and 'l' or '') + (x and 'x' or ''))

    def remove(self, path):
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                'l' in self._cache[path]['flags']):
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']

        return self._existsinparent(path)

    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path]['exists']

        return self._existsinparent(path)

    def size(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return len(self._cache[path]['data'])
            else:
                # same self._path -> path correction as in flags()
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        return self._wrappedctx[path].size()

    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped contexts' if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        files = self._cache.keys()
        def getfile(repo, memctx, path):
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)

    def isdirty(self, path):
        return path in self._cache

    def isempty(self):
        # We need to discard any keys that are actually clean before the empty
        # commit check.
        self._compact()
        return len(self._cache) == 0

    def clean(self):
        # discard all pending writes
        self._cache = {}

    def _compact(self):
        """Removes keys from the cache that are actually clean, by comparing
        them with the underlying context.

        This can occur during the merge process, e.g. by passing --tool :local
        to resolve a conflict.
        """
        keys = []
        for path in self._cache.keys():
            cache = self._cache[path]
            try:
                underlying = self._wrappedctx[path]
                if (underlying.data() == cache['data'] and
                    underlying.flags() == cache['flags']):
                    keys.append(path)
            except error.ManifestLookupError:
                # Path not in the underlying manifest (created).
                continue

        for path in keys:
            del self._cache[path]
        return keys

    def _markdirty(self, path, exists, data=None, date=None, flags=''):
        self._cache[path] = {
            'exists': exists,
            'data': data,
            'date': date,
            'flags': flags,
            'copied': None,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(self._repo, path, parent=self,
                                     filelog=filelog)
2045
2044
2046 class overlayworkingfilectx(committablefilectx):
2045 class overlayworkingfilectx(committablefilectx):
2047 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2046 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2048 cache, which can be flushed through later by calling ``flush()``."""
2047 cache, which can be flushed through later by calling ``flush()``."""
2049
2048
2050 def __init__(self, repo, path, filelog=None, parent=None):
2049 def __init__(self, repo, path, filelog=None, parent=None):
2051 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2050 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2052 parent)
2051 parent)
2053 self._repo = repo
2052 self._repo = repo
2054 self._parent = parent
2053 self._parent = parent
2055 self._path = path
2054 self._path = path
2056
2055
2057 def cmp(self, fctx):
2056 def cmp(self, fctx):
2058 return self.data() != fctx.data()
2057 return self.data() != fctx.data()
2059
2058
2060 def changectx(self):
2059 def changectx(self):
2061 return self._parent
2060 return self._parent
2062
2061
2063 def data(self):
2062 def data(self):
2064 return self._parent.data(self._path)
2063 return self._parent.data(self._path)
2065
2064
2066 def date(self):
2065 def date(self):
2067 return self._parent.filedate(self._path)
2066 return self._parent.filedate(self._path)
2068
2067
2069 def exists(self):
2068 def exists(self):
2070 return self.lexists()
2069 return self.lexists()
2071
2070
2072 def lexists(self):
2071 def lexists(self):
2073 return self._parent.exists(self._path)
2072 return self._parent.exists(self._path)
2074
2073
2075 def renamed(self):
2074 def renamed(self):
2076 path = self._parent.copydata(self._path)
2075 path = self._parent.copydata(self._path)
2077 if not path:
2076 if not path:
2078 return None
2077 return None
2079 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2078 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2080
2079
2081 def size(self):
2080 def size(self):
2082 return self._parent.size(self._path)
2081 return self._parent.size(self._path)
2083
2082
2084 def markcopied(self, origin):
2083 def markcopied(self, origin):
2085 self._parent.markcopied(self._path, origin)
2084 self._parent.markcopied(self._path, origin)
2086
2085
2087 def audit(self):
2086 def audit(self):
2088 pass
2087 pass
2089
2088
2090 def flags(self):
2089 def flags(self):
2091 return self._parent.flags(self._path)
2090 return self._parent.flags(self._path)
2092
2091
2093 def setflags(self, islink, isexec):
2092 def setflags(self, islink, isexec):
2094 return self._parent.setflags(self._path, islink, isexec)
2093 return self._parent.setflags(self._path, islink, isexec)
2095
2094
2096 def write(self, data, flags, backgroundclose=False, **kwargs):
2095 def write(self, data, flags, backgroundclose=False, **kwargs):
2097 return self._parent.write(self._path, data, flags, **kwargs)
2096 return self._parent.write(self._path, data, flags, **kwargs)
2098
2097
2099 def remove(self, ignoremissing=False):
2098 def remove(self, ignoremissing=False):
2100 return self._parent.remove(self._path)
2099 return self._parent.remove(self._path)
2101
2100
2102 def clearunknown(self):
2101 def clearunknown(self):
2103 pass
2102 pass
2104
2103
2105 class workingcommitctx(workingctx):
2104 class workingcommitctx(workingctx):
2106 """A workingcommitctx object makes access to data related to
2105 """A workingcommitctx object makes access to data related to
2107 the revision being committed convenient.
2106 the revision being committed convenient.
2108
2107
2109 This hides changes in the working directory, if they aren't
2108 This hides changes in the working directory, if they aren't
2110 committed in this context.
2109 committed in this context.
2111 """
2110 """
2112 def __init__(self, repo, changes,
2111 def __init__(self, repo, changes,
2113 text="", user=None, date=None, extra=None):
2112 text="", user=None, date=None, extra=None):
2114 super(workingctx, self).__init__(repo, text, user, date, extra,
2113 super(workingctx, self).__init__(repo, text, user, date, extra,
2115 changes)
2114 changes)
2116
2115
2117 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2116 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2118 """Return matched files only in ``self._status``
2117 """Return matched files only in ``self._status``
2119
2118
2120 Uncommitted files appear "clean" via this context, even if
2119 Uncommitted files appear "clean" via this context, even if
2121 they aren't actually so in the working directory.
2120 they aren't actually so in the working directory.
2122 """
2121 """
2123 if clean:
2122 if clean:
2124 clean = [f for f in self._manifest if f not in self._changedset]
2123 clean = [f for f in self._manifest if f not in self._changedset]
2125 else:
2124 else:
2126 clean = []
2125 clean = []
2127 return scmutil.status([f for f in self._status.modified if match(f)],
2126 return scmutil.status([f for f in self._status.modified if match(f)],
2128 [f for f in self._status.added if match(f)],
2127 [f for f in self._status.added if match(f)],
2129 [f for f in self._status.removed if match(f)],
2128 [f for f in self._status.removed if match(f)],
2130 [], [], [], clean)
2129 [], [], [], clean)
2131
2130
2132 @propertycache
2131 @propertycache
2133 def _changedset(self):
2132 def _changedset(self):
2134 """Return the set of files changed in this context
2133 """Return the set of files changed in this context
2135 """
2134 """
2136 changed = set(self._status.modified)
2135 changed = set(self._status.modified)
2137 changed.update(self._status.added)
2136 changed.update(self._status.added)
2138 changed.update(self._status.removed)
2137 changed.update(self._status.removed)
2139 return changed
2138 return changed
2140
2139
2141 def makecachingfilectxfn(func):
2140 def makecachingfilectxfn(func):
2142 """Create a filectxfn that caches based on the path.
2141 """Create a filectxfn that caches based on the path.
2143
2142
2144 We can't use util.cachefunc because it uses all arguments as the cache
2143 We can't use util.cachefunc because it uses all arguments as the cache
2145 key and this creates a cycle since the arguments include the repo and
2144 key and this creates a cycle since the arguments include the repo and
2146 memctx.
2145 memctx.
2147 """
2146 """
2148 cache = {}
2147 cache = {}
2149
2148
2150 def getfilectx(repo, memctx, path):
2149 def getfilectx(repo, memctx, path):
2151 if path not in cache:
2150 if path not in cache:
2152 cache[path] = func(repo, memctx, path)
2151 cache[path] = func(repo, memctx, path)
2153 return cache[path]
2152 return cache[path]
2154
2153
2155 return getfilectx
2154 return getfilectx
2156
2155
2157 def memfilefromctx(ctx):
2156 def memfilefromctx(ctx):
2158 """Given a context return a memfilectx for ctx[path]
2157 """Given a context return a memfilectx for ctx[path]
2159
2158
2160 This is a convenience method for building a memctx based on another
2159 This is a convenience method for building a memctx based on another
2161 context.
2160 context.
2162 """
2161 """
2163 def getfilectx(repo, memctx, path):
2162 def getfilectx(repo, memctx, path):
2164 fctx = ctx[path]
2163 fctx = ctx[path]
2165 # this is weird but apparently we only keep track of one parent
2164 # this is weird but apparently we only keep track of one parent
2166 # (why not only store that instead of a tuple?)
2165 # (why not only store that instead of a tuple?)
2167 copied = fctx.renamed()
2166 copied = fctx.renamed()
2168 if copied:
2167 if copied:
2169 copied = copied[0]
2168 copied = copied[0]
2170 return memfilectx(repo, memctx, path, fctx.data(),
2169 return memfilectx(repo, memctx, path, fctx.data(),
2171 islink=fctx.islink(), isexec=fctx.isexec(),
2170 islink=fctx.islink(), isexec=fctx.isexec(),
2172 copied=copied)
2171 copied=copied)
2173
2172
2174 return getfilectx
2173 return getfilectx
2175
2174
2176 def memfilefrompatch(patchstore):
2175 def memfilefrompatch(patchstore):
2177 """Given a patch (e.g. patchstore object) return a memfilectx
2176 """Given a patch (e.g. patchstore object) return a memfilectx
2178
2177
2179 This is a convenience method for building a memctx based on a patchstore.
2178 This is a convenience method for building a memctx based on a patchstore.
2180 """
2179 """
2181 def getfilectx(repo, memctx, path):
2180 def getfilectx(repo, memctx, path):
2182 data, mode, copied = patchstore.getfile(path)
2181 data, mode, copied = patchstore.getfile(path)
2183 if data is None:
2182 if data is None:
2184 return None
2183 return None
2185 islink, isexec = mode
2184 islink, isexec = mode
2186 return memfilectx(repo, memctx, path, data, islink=islink,
2185 return memfilectx(repo, memctx, path, data, islink=islink,
2187 isexec=isexec, copied=copied)
2186 isexec=isexec, copied=copied)
2188
2187
2189 return getfilectx
2188 return getfilectx
2190
2189
2191 class memctx(committablectx):
2190 class memctx(committablectx):
2192 """Use memctx to perform in-memory commits via localrepo.commitctx().
2191 """Use memctx to perform in-memory commits via localrepo.commitctx().
2193
2192
2194 Revision information is supplied at initialization time while
2193 Revision information is supplied at initialization time while
2195 related files data and is made available through a callback
2194 related files data and is made available through a callback
2196 mechanism. 'repo' is the current localrepo, 'parents' is a
2195 mechanism. 'repo' is the current localrepo, 'parents' is a
2197 sequence of two parent revisions identifiers (pass None for every
2196 sequence of two parent revisions identifiers (pass None for every
2198 missing parent), 'text' is the commit message and 'files' lists
2197 missing parent), 'text' is the commit message and 'files' lists
2199 names of files touched by the revision (normalized and relative to
2198 names of files touched by the revision (normalized and relative to
2200 repository root).
2199 repository root).
2201
2200
2202 filectxfn(repo, memctx, path) is a callable receiving the
2201 filectxfn(repo, memctx, path) is a callable receiving the
2203 repository, the current memctx object and the normalized path of
2202 repository, the current memctx object and the normalized path of
2204 requested file, relative to repository root. It is fired by the
2203 requested file, relative to repository root. It is fired by the
2205 commit function for every file in 'files', but calls order is
2204 commit function for every file in 'files', but calls order is
2206 undefined. If the file is available in the revision being
2205 undefined. If the file is available in the revision being
2207 committed (updated or added), filectxfn returns a memfilectx
2206 committed (updated or added), filectxfn returns a memfilectx
2208 object. If the file was removed, filectxfn return None for recent
2207 object. If the file was removed, filectxfn return None for recent
2209 Mercurial. Moved files are represented by marking the source file
2208 Mercurial. Moved files are represented by marking the source file
2210 removed and the new file added with copy information (see
2209 removed and the new file added with copy information (see
2211 memfilectx).
2210 memfilectx).
2212
2211
2213 user receives the committer name and defaults to current
2212 user receives the committer name and defaults to current
2214 repository username, date is the commit date in any format
2213 repository username, date is the commit date in any format
2215 supported by dateutil.parsedate() and defaults to current date, extra
2214 supported by dateutil.parsedate() and defaults to current date, extra
2216 is a dictionary of metadata or is left empty.
2215 is a dictionary of metadata or is left empty.
2217 """
2216 """
2218
2217
2219 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2218 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2220 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2219 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2221 # this field to determine what to do in filectxfn.
2220 # this field to determine what to do in filectxfn.
2222 _returnnoneformissingfiles = True
2221 _returnnoneformissingfiles = True
2223
2222
2224 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2223 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2225 date=None, extra=None, branch=None, editor=False):
2224 date=None, extra=None, branch=None, editor=False):
2226 super(memctx, self).__init__(repo, text, user, date, extra)
2225 super(memctx, self).__init__(repo, text, user, date, extra)
2227 self._rev = None
2226 self._rev = None
2228 self._node = None
2227 self._node = None
2229 parents = [(p or nullid) for p in parents]
2228 parents = [(p or nullid) for p in parents]
2230 p1, p2 = parents
2229 p1, p2 = parents
2231 self._parents = [self._repo[p] for p in (p1, p2)]
2230 self._parents = [self._repo[p] for p in (p1, p2)]
2232 files = sorted(set(files))
2231 files = sorted(set(files))
2233 self._files = files
2232 self._files = files
2234 if branch is not None:
2233 if branch is not None:
2235 self._extra['branch'] = encoding.fromlocal(branch)
2234 self._extra['branch'] = encoding.fromlocal(branch)
2236 self.substate = {}
2235 self.substate = {}
2237
2236
2238 if isinstance(filectxfn, patch.filestore):
2237 if isinstance(filectxfn, patch.filestore):
2239 filectxfn = memfilefrompatch(filectxfn)
2238 filectxfn = memfilefrompatch(filectxfn)
2240 elif not callable(filectxfn):
2239 elif not callable(filectxfn):
2241 # if store is not callable, wrap it in a function
2240 # if store is not callable, wrap it in a function
2242 filectxfn = memfilefromctx(filectxfn)
2241 filectxfn = memfilefromctx(filectxfn)
2243
2242
2244 # memoizing increases performance for e.g. vcs convert scenarios.
2243 # memoizing increases performance for e.g. vcs convert scenarios.
2245 self._filectxfn = makecachingfilectxfn(filectxfn)
2244 self._filectxfn = makecachingfilectxfn(filectxfn)
2246
2245
2247 if editor:
2246 if editor:
2248 self._text = editor(self._repo, self, [])
2247 self._text = editor(self._repo, self, [])
2249 self._repo.savecommitmessage(self._text)
2248 self._repo.savecommitmessage(self._text)
2250
2249
2251 def filectx(self, path, filelog=None):
2250 def filectx(self, path, filelog=None):
2252 """get a file context from the working directory
2251 """get a file context from the working directory
2253
2252
2254 Returns None if file doesn't exist and should be removed."""
2253 Returns None if file doesn't exist and should be removed."""
2255 return self._filectxfn(self._repo, self, path)
2254 return self._filectxfn(self._repo, self, path)
2256
2255
2257 def commit(self):
2256 def commit(self):
2258 """commit context to the repo"""
2257 """commit context to the repo"""
2259 return self._repo.commitctx(self)
2258 return self._repo.commitctx(self)
2260
2259
2261 @propertycache
2260 @propertycache
2262 def _manifest(self):
2261 def _manifest(self):
2263 """generate a manifest based on the return values of filectxfn"""
2262 """generate a manifest based on the return values of filectxfn"""
2264
2263
2265 # keep this simple for now; just worry about p1
2264 # keep this simple for now; just worry about p1
2266 pctx = self._parents[0]
2265 pctx = self._parents[0]
2267 man = pctx.manifest().copy()
2266 man = pctx.manifest().copy()
2268
2267
2269 for f in self._status.modified:
2268 for f in self._status.modified:
2270 p1node = nullid
2269 p1node = nullid
2271 p2node = nullid
2270 p2node = nullid
2272 p = pctx[f].parents() # if file isn't in pctx, check p2?
2271 p = pctx[f].parents() # if file isn't in pctx, check p2?
2273 if len(p) > 0:
2272 if len(p) > 0:
2274 p1node = p[0].filenode()
2273 p1node = p[0].filenode()
2275 if len(p) > 1:
2274 if len(p) > 1:
2276 p2node = p[1].filenode()
2275 p2node = p[1].filenode()
2277 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2276 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2278
2277
2279 for f in self._status.added:
2278 for f in self._status.added:
2280 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2279 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2281
2280
2282 for f in self._status.removed:
2281 for f in self._status.removed:
2283 if f in man:
2282 if f in man:
2284 del man[f]
2283 del man[f]
2285
2284
2286 return man
2285 return man
2287
2286
2288 @propertycache
2287 @propertycache
2289 def _status(self):
2288 def _status(self):
2290 """Calculate exact status from ``files`` specified at construction
2289 """Calculate exact status from ``files`` specified at construction
2291 """
2290 """
2292 man1 = self.p1().manifest()
2291 man1 = self.p1().manifest()
2293 p2 = self._parents[1]
2292 p2 = self._parents[1]
2294 # "1 < len(self._parents)" can't be used for checking
2293 # "1 < len(self._parents)" can't be used for checking
2295 # existence of the 2nd parent, because "memctx._parents" is
2294 # existence of the 2nd parent, because "memctx._parents" is
2296 # explicitly initialized by the list, of which length is 2.
2295 # explicitly initialized by the list, of which length is 2.
2297 if p2.node() != nullid:
2296 if p2.node() != nullid:
2298 man2 = p2.manifest()
2297 man2 = p2.manifest()
2299 managing = lambda f: f in man1 or f in man2
2298 managing = lambda f: f in man1 or f in man2
2300 else:
2299 else:
2301 managing = lambda f: f in man1
2300 managing = lambda f: f in man1
2302
2301
2303 modified, added, removed = [], [], []
2302 modified, added, removed = [], [], []
2304 for f in self._files:
2303 for f in self._files:
2305 if not managing(f):
2304 if not managing(f):
2306 added.append(f)
2305 added.append(f)
2307 elif self[f]:
2306 elif self[f]:
2308 modified.append(f)
2307 modified.append(f)
2309 else:
2308 else:
2310 removed.append(f)
2309 removed.append(f)
2311
2310
2312 return scmutil.status(modified, added, removed, [], [], [], [])
2311 return scmutil.status(modified, added, removed, [], [], [], [])
2313
2312
2314 class memfilectx(committablefilectx):
2313 class memfilectx(committablefilectx):
2315 """memfilectx represents an in-memory file to commit.
2314 """memfilectx represents an in-memory file to commit.
2316
2315
2317 See memctx and committablefilectx for more details.
2316 See memctx and committablefilectx for more details.
2318 """
2317 """
2319 def __init__(self, repo, changectx, path, data, islink=False,
2318 def __init__(self, repo, changectx, path, data, islink=False,
2320 isexec=False, copied=None):
2319 isexec=False, copied=None):
2321 """
2320 """
2322 path is the normalized file path relative to repository root.
2321 path is the normalized file path relative to repository root.
2323 data is the file content as a string.
2322 data is the file content as a string.
2324 islink is True if the file is a symbolic link.
2323 islink is True if the file is a symbolic link.
2325 isexec is True if the file is executable.
2324 isexec is True if the file is executable.
2326 copied is the source file path if current file was copied in the
2325 copied is the source file path if current file was copied in the
2327 revision being committed, or None."""
2326 revision being committed, or None."""
2328 super(memfilectx, self).__init__(repo, path, None, changectx)
2327 super(memfilectx, self).__init__(repo, path, None, changectx)
2329 self._data = data
2328 self._data = data
2330 if islink:
2329 if islink:
2331 self._flags = 'l'
2330 self._flags = 'l'
2332 elif isexec:
2331 elif isexec:
2333 self._flags = 'x'
2332 self._flags = 'x'
2334 else:
2333 else:
2335 self._flags = ''
2334 self._flags = ''
2336 self._copied = None
2335 self._copied = None
2337 if copied:
2336 if copied:
2338 self._copied = (copied, nullid)
2337 self._copied = (copied, nullid)
2339
2338
2340 def data(self):
2339 def data(self):
2341 return self._data
2340 return self._data
2342
2341
2343 def remove(self, ignoremissing=False):
2342 def remove(self, ignoremissing=False):
2344 """wraps unlink for a repo's working directory"""
2343 """wraps unlink for a repo's working directory"""
2345 # need to figure out what to do here
2344 # need to figure out what to do here
2346 del self._changectx[self._path]
2345 del self._changectx[self._path]
2347
2346
2348 def write(self, data, flags, **kwargs):
2347 def write(self, data, flags, **kwargs):
2349 """wraps repo.wwrite"""
2348 """wraps repo.wwrite"""
2350 self._data = data
2349 self._data = data
2351
2350
2352 class overlayfilectx(committablefilectx):
2351 class overlayfilectx(committablefilectx):
2353 """Like memfilectx but take an original filectx and optional parameters to
2352 """Like memfilectx but take an original filectx and optional parameters to
2354 override parts of it. This is useful when fctx.data() is expensive (i.e.
2353 override parts of it. This is useful when fctx.data() is expensive (i.e.
2355 flag processor is expensive) and raw data, flags, and filenode could be
2354 flag processor is expensive) and raw data, flags, and filenode could be
2356 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2355 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2357 """
2356 """
2358
2357
2359 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2358 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2360 copied=None, ctx=None):
2359 copied=None, ctx=None):
2361 """originalfctx: filecontext to duplicate
2360 """originalfctx: filecontext to duplicate
2362
2361
2363 datafunc: None or a function to override data (file content). It is a
2362 datafunc: None or a function to override data (file content). It is a
2364 function to be lazy. path, flags, copied, ctx: None or overridden value
2363 function to be lazy. path, flags, copied, ctx: None or overridden value
2365
2364
2366 copied could be (path, rev), or False. copied could also be just path,
2365 copied could be (path, rev), or False. copied could also be just path,
2367 and will be converted to (path, nullid). This simplifies some callers.
2366 and will be converted to (path, nullid). This simplifies some callers.
2368 """
2367 """
2369
2368
2370 if path is None:
2369 if path is None:
2371 path = originalfctx.path()
2370 path = originalfctx.path()
2372 if ctx is None:
2371 if ctx is None:
2373 ctx = originalfctx.changectx()
2372 ctx = originalfctx.changectx()
2374 ctxmatch = lambda: True
2373 ctxmatch = lambda: True
2375 else:
2374 else:
2376 ctxmatch = lambda: ctx == originalfctx.changectx()
2375 ctxmatch = lambda: ctx == originalfctx.changectx()
2377
2376
2378 repo = originalfctx.repo()
2377 repo = originalfctx.repo()
2379 flog = originalfctx.filelog()
2378 flog = originalfctx.filelog()
2380 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2379 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2381
2380
2382 if copied is None:
2381 if copied is None:
2383 copied = originalfctx.renamed()
2382 copied = originalfctx.renamed()
2384 copiedmatch = lambda: True
2383 copiedmatch = lambda: True
2385 else:
2384 else:
2386 if copied and not isinstance(copied, tuple):
2385 if copied and not isinstance(copied, tuple):
2387 # repo._filecommit will recalculate copyrev so nullid is okay
2386 # repo._filecommit will recalculate copyrev so nullid is okay
2388 copied = (copied, nullid)
2387 copied = (copied, nullid)
2389 copiedmatch = lambda: copied == originalfctx.renamed()
2388 copiedmatch = lambda: copied == originalfctx.renamed()
2390
2389
2391 # When data, copied (could affect data), ctx (could affect filelog
2390 # When data, copied (could affect data), ctx (could affect filelog
2392 # parents) are not overridden, rawdata, rawflags, and filenode may be
2391 # parents) are not overridden, rawdata, rawflags, and filenode may be
2393 # reused (repo._filecommit should double check filelog parents).
2392 # reused (repo._filecommit should double check filelog parents).
2394 #
2393 #
2395 # path, flags are not hashed in filelog (but in manifestlog) so they do
2394 # path, flags are not hashed in filelog (but in manifestlog) so they do
2396 # not affect reusable here.
2395 # not affect reusable here.
2397 #
2396 #
2398 # If ctx or copied is overridden to a same value with originalfctx,
2397 # If ctx or copied is overridden to a same value with originalfctx,
2399 # still consider it's reusable. originalfctx.renamed() may be a bit
2398 # still consider it's reusable. originalfctx.renamed() may be a bit
2400 # expensive so it's not called unless necessary. Assuming datafunc is
2399 # expensive so it's not called unless necessary. Assuming datafunc is
2401 # always expensive, do not call it for this "reusable" test.
2400 # always expensive, do not call it for this "reusable" test.
2402 reusable = datafunc is None and ctxmatch() and copiedmatch()
2401 reusable = datafunc is None and ctxmatch() and copiedmatch()
2403
2402
2404 if datafunc is None:
2403 if datafunc is None:
2405 datafunc = originalfctx.data
2404 datafunc = originalfctx.data
2406 if flags is None:
2405 if flags is None:
2407 flags = originalfctx.flags()
2406 flags = originalfctx.flags()
2408
2407
2409 self._datafunc = datafunc
2408 self._datafunc = datafunc
2410 self._flags = flags
2409 self._flags = flags
2411 self._copied = copied
2410 self._copied = copied
2412
2411
2413 if reusable:
2412 if reusable:
2414 # copy extra fields from originalfctx
2413 # copy extra fields from originalfctx
2415 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2414 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2416 for attr_ in attrs:
2415 for attr_ in attrs:
2417 if util.safehasattr(originalfctx, attr_):
2416 if util.safehasattr(originalfctx, attr_):
2418 setattr(self, attr_, getattr(originalfctx, attr_))
2417 setattr(self, attr_, getattr(originalfctx, attr_))
2419
2418
2420 def data(self):
2419 def data(self):
2421 return self._datafunc()
2420 return self._datafunc()
2422
2421
2423 class metadataonlyctx(committablectx):
2422 class metadataonlyctx(committablectx):
2424 """Like memctx but it's reusing the manifest of different commit.
2423 """Like memctx but it's reusing the manifest of different commit.
2425 Intended to be used by lightweight operations that are creating
2424 Intended to be used by lightweight operations that are creating
2426 metadata-only changes.
2425 metadata-only changes.
2427
2426
2428 Revision information is supplied at initialization time. 'repo' is the
2427 Revision information is supplied at initialization time. 'repo' is the
2429 current localrepo, 'ctx' is original revision which manifest we're reuisng
2428 current localrepo, 'ctx' is original revision which manifest we're reuisng
2430 'parents' is a sequence of two parent revisions identifiers (pass None for
2429 'parents' is a sequence of two parent revisions identifiers (pass None for
2431 every missing parent), 'text' is the commit.
2430 every missing parent), 'text' is the commit.
2432
2431
2433 user receives the committer name and defaults to current repository
2432 user receives the committer name and defaults to current repository
2434 username, date is the commit date in any format supported by
2433 username, date is the commit date in any format supported by
2435 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2434 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2436 metadata or is left empty.
2435 metadata or is left empty.
2437 """
2436 """
2438 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2437 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2439 date=None, extra=None, editor=False):
2438 date=None, extra=None, editor=False):
2440 if text is None:
2439 if text is None:
2441 text = originalctx.description()
2440 text = originalctx.description()
2442 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2441 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2443 self._rev = None
2442 self._rev = None
2444 self._node = None
2443 self._node = None
2445 self._originalctx = originalctx
2444 self._originalctx = originalctx
2446 self._manifestnode = originalctx.manifestnode()
2445 self._manifestnode = originalctx.manifestnode()
2447 if parents is None:
2446 if parents is None:
2448 parents = originalctx.parents()
2447 parents = originalctx.parents()
2449 else:
2448 else:
2450 parents = [repo[p] for p in parents if p is not None]
2449 parents = [repo[p] for p in parents if p is not None]
2451 parents = parents[:]
2450 parents = parents[:]
2452 while len(parents) < 2:
2451 while len(parents) < 2:
2453 parents.append(repo[nullid])
2452 parents.append(repo[nullid])
2454 p1, p2 = self._parents = parents
2453 p1, p2 = self._parents = parents
2455
2454
2456 # sanity check to ensure that the reused manifest parents are
2455 # sanity check to ensure that the reused manifest parents are
2457 # manifests of our commit parents
2456 # manifests of our commit parents
2458 mp1, mp2 = self.manifestctx().parents
2457 mp1, mp2 = self.manifestctx().parents
2459 if p1 != nullid and p1.manifestnode() != mp1:
2458 if p1 != nullid and p1.manifestnode() != mp1:
2460 raise RuntimeError('can\'t reuse the manifest: '
2459 raise RuntimeError('can\'t reuse the manifest: '
2461 'its p1 doesn\'t match the new ctx p1')
2460 'its p1 doesn\'t match the new ctx p1')
2462 if p2 != nullid and p2.manifestnode() != mp2:
2461 if p2 != nullid and p2.manifestnode() != mp2:
2463 raise RuntimeError('can\'t reuse the manifest: '
2462 raise RuntimeError('can\'t reuse the manifest: '
2464 'its p2 doesn\'t match the new ctx p2')
2463 'its p2 doesn\'t match the new ctx p2')
2465
2464
2466 self._files = originalctx.files()
2465 self._files = originalctx.files()
2467 self.substate = {}
2466 self.substate = {}
2468
2467
2469 if editor:
2468 if editor:
2470 self._text = editor(self._repo, self, [])
2469 self._text = editor(self._repo, self, [])
2471 self._repo.savecommitmessage(self._text)
2470 self._repo.savecommitmessage(self._text)
2472
2471
    def manifestnode(self):
        """Return the node id of the manifest reused from the original ctx.

        Captured in ``__init__`` via ``originalctx.manifestnode()``.
        """
        return self._manifestnode
2475
2474
    @property
    def _manifestctx(self):
        # Resolve the manifest context directly from the reused manifest
        # node; this ctx has no committed changelog entry to go through.
        return self._repo.manifestlog[self._manifestnode]
2479
2478
    def filectx(self, path, filelog=None):
        """Return a file context for ``path``, delegated to the original
        changectx this metadata-only context was created from."""
        return self._originalctx.filectx(path, filelog=filelog)
2482
2481
    def commit(self):
        """commit context to the repo

        Returns whatever ``localrepository.commitctx`` returns (the new
        changeset's node id).
        """
        return self._repo.commitctx(self)
2486
2485
    @property
    def _manifest(self):
        # The manifest is reused wholesale from the original context; this
        # ctx never builds one of its own.
        return self._originalctx.manifest()
2490
2489
2491 @propertycache
2490 @propertycache
2492 def _status(self):
2491 def _status(self):
2493 """Calculate exact status from ``files`` specified in the ``origctx``
2492 """Calculate exact status from ``files`` specified in the ``origctx``
2494 and parents manifests.
2493 and parents manifests.
2495 """
2494 """
2496 man1 = self.p1().manifest()
2495 man1 = self.p1().manifest()
2497 p2 = self._parents[1]
2496 p2 = self._parents[1]
2498 # "1 < len(self._parents)" can't be used for checking
2497 # "1 < len(self._parents)" can't be used for checking
2499 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2498 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2500 # explicitly initialized by the list, of which length is 2.
2499 # explicitly initialized by the list, of which length is 2.
2501 if p2.node() != nullid:
2500 if p2.node() != nullid:
2502 man2 = p2.manifest()
2501 man2 = p2.manifest()
2503 managing = lambda f: f in man1 or f in man2
2502 managing = lambda f: f in man1 or f in man2
2504 else:
2503 else:
2505 managing = lambda f: f in man1
2504 managing = lambda f: f in man1
2506
2505
2507 modified, added, removed = [], [], []
2506 modified, added, removed = [], [], []
2508 for f in self._files:
2507 for f in self._files:
2509 if not managing(f):
2508 if not managing(f):
2510 added.append(f)
2509 added.append(f)
2511 elif f in self:
2510 elif f in self:
2512 modified.append(f)
2511 modified.append(f)
2513 else:
2512 else:
2514 removed.append(f)
2513 removed.append(f)
2515
2514
2516 return scmutil.status(modified, added, removed, [], [], [], [])
2515 return scmutil.status(modified, added, removed, [], [], [], [])
2517
2516
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        """Return True if this file's contents differ from ``fctx``'s."""
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        # Arbitrary on-disk files carry no exec/symlink flags.
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        assert not flags
        # Open in binary mode to match data()/decodeddata(), which read
        # bytes: text mode would reject bytes on Python 3 and translate
        # line endings on Windows, corrupting round-trips.
        with open(self._path, "wb") as f:
            f.write(data)
General Comments 0
You need to be logged in to leave comments. Login now