hidden: extract the code generating "filtered rev" error for wrapping...
Pierre-Yves David
r32006:c84c83b5 default
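Note: the new `_filterederror` helper's docstring says it was extracted so that extensions (e.g. evolve) can experiment with alternative message variants. As a rough illustration only (not part of this changeset), an extension could wrap the helper roughly as sketched below, assuming Mercurial's standard `extensions.wrapfunction` API; the wrapper name and the tweaked message are hypothetical.

```python
# Hypothetical extension sketch: wrap context._filterederror to customize the
# error raised for filtered/hidden revisions. Everything here except the
# wrapfunction API and FilteredRepoLookupError is illustrative.
from mercurial import context, error, extensions
from mercurial.i18n import _

def _filterederrorvariant(orig, repo, changeid):
    """Return a customized error for hidden revisions, defer to orig otherwise."""
    if repo.filtername.startswith('visible'):
        # Experimental message variant for hidden revisions.
        msg = _("hidden revision '%s' (message variant)") % changeid
        hint = _('use --hidden to access hidden revisions')
        return error.FilteredRepoLookupError(msg, hint=hint)
    # Fall back to the stock message for other filtered subsets.
    return orig(repo, changeid)

def uisetup(ui):
    # Install the wrapper around the newly extracted helper.
    extensions.wrapfunction(context, '_filterederror', _filterederrorvariant)
```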
@@ -1,2160 +1,2167 @@
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirid,
24 wdirid,
25 wdirnodes,
25 wdirnodes,
26 )
26 )
27 from . import (
27 from . import (
28 encoding,
28 encoding,
29 error,
29 error,
30 fileset,
30 fileset,
31 match as matchmod,
31 match as matchmod,
32 mdiff,
32 mdiff,
33 obsolete as obsmod,
33 obsolete as obsmod,
34 patch,
34 patch,
35 phases,
35 phases,
36 pycompat,
36 pycompat,
37 repoview,
37 repoview,
38 revlog,
38 revlog,
39 scmutil,
39 scmutil,
40 subrepo,
40 subrepo,
41 util,
41 util,
42 )
42 )
43
43
44 propertycache = util.propertycache
44 propertycache = util.propertycache
45
45
46 nonascii = re.compile(r'[^\x21-\x7f]').search
46 nonascii = re.compile(r'[^\x21-\x7f]').search
47
47
48 class basectx(object):
48 class basectx(object):
49 """A basectx object represents the common logic for its children:
49 """A basectx object represents the common logic for its children:
50 changectx: read-only context that is already present in the repo,
50 changectx: read-only context that is already present in the repo,
51 workingctx: a context that represents the working directory and can
51 workingctx: a context that represents the working directory and can
52 be committed,
52 be committed,
53 memctx: a context that represents changes in-memory and can also
53 memctx: a context that represents changes in-memory and can also
54 be committed."""
54 be committed."""
55 def __new__(cls, repo, changeid='', *args, **kwargs):
55 def __new__(cls, repo, changeid='', *args, **kwargs):
56 if isinstance(changeid, basectx):
56 if isinstance(changeid, basectx):
57 return changeid
57 return changeid
58
58
59 o = super(basectx, cls).__new__(cls)
59 o = super(basectx, cls).__new__(cls)
60
60
61 o._repo = repo
61 o._repo = repo
62 o._rev = nullrev
62 o._rev = nullrev
63 o._node = nullid
63 o._node = nullid
64
64
65 return o
65 return o
66
66
67 def __str__(self):
67 def __str__(self):
68 r = short(self.node())
68 r = short(self.node())
69 if pycompat.ispy3:
69 if pycompat.ispy3:
70 return r.decode('ascii')
70 return r.decode('ascii')
71 return r
71 return r
72
72
73 def __bytes__(self):
73 def __bytes__(self):
74 return short(self.node())
74 return short(self.node())
75
75
76 def __int__(self):
76 def __int__(self):
77 return self.rev()
77 return self.rev()
78
78
79 def __repr__(self):
79 def __repr__(self):
80 return "<%s %s>" % (type(self).__name__, str(self))
80 return "<%s %s>" % (type(self).__name__, str(self))
81
81
82 def __eq__(self, other):
82 def __eq__(self, other):
83 try:
83 try:
84 return type(self) == type(other) and self._rev == other._rev
84 return type(self) == type(other) and self._rev == other._rev
85 except AttributeError:
85 except AttributeError:
86 return False
86 return False
87
87
88 def __ne__(self, other):
88 def __ne__(self, other):
89 return not (self == other)
89 return not (self == other)
90
90
91 def __contains__(self, key):
91 def __contains__(self, key):
92 return key in self._manifest
92 return key in self._manifest
93
93
94 def __getitem__(self, key):
94 def __getitem__(self, key):
95 return self.filectx(key)
95 return self.filectx(key)
96
96
97 def __iter__(self):
97 def __iter__(self):
98 return iter(self._manifest)
98 return iter(self._manifest)
99
99
100 def _buildstatusmanifest(self, status):
100 def _buildstatusmanifest(self, status):
101 """Builds a manifest that includes the given status results, if this is
101 """Builds a manifest that includes the given status results, if this is
102 a working copy context. For non-working copy contexts, it just returns
102 a working copy context. For non-working copy contexts, it just returns
103 the normal manifest."""
103 the normal manifest."""
104 return self.manifest()
104 return self.manifest()
105
105
106 def _matchstatus(self, other, match):
106 def _matchstatus(self, other, match):
107 """return match.always if match is none
107 """return match.always if match is none
108
108
109 This internal method provides a way for child objects to override the
109 This internal method provides a way for child objects to override the
110 match operator.
110 match operator.
111 """
111 """
112 return match or matchmod.always(self._repo.root, self._repo.getcwd())
112 return match or matchmod.always(self._repo.root, self._repo.getcwd())
113
113
114 def _buildstatus(self, other, s, match, listignored, listclean,
114 def _buildstatus(self, other, s, match, listignored, listclean,
115 listunknown):
115 listunknown):
116 """build a status with respect to another context"""
116 """build a status with respect to another context"""
117 # Load earliest manifest first for caching reasons. More specifically,
117 # Load earliest manifest first for caching reasons. More specifically,
118 # if you have revisions 1000 and 1001, 1001 is probably stored as a
118 # if you have revisions 1000 and 1001, 1001 is probably stored as a
119 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
119 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
120 # 1000 and cache it so that when you read 1001, we just need to apply a
120 # 1000 and cache it so that when you read 1001, we just need to apply a
121 # delta to what's in the cache. So that's one full reconstruction + one
121 # delta to what's in the cache. So that's one full reconstruction + one
122 # delta application.
122 # delta application.
123 mf2 = None
123 mf2 = None
124 if self.rev() is not None and self.rev() < other.rev():
124 if self.rev() is not None and self.rev() < other.rev():
125 mf2 = self._buildstatusmanifest(s)
125 mf2 = self._buildstatusmanifest(s)
126 mf1 = other._buildstatusmanifest(s)
126 mf1 = other._buildstatusmanifest(s)
127 if mf2 is None:
127 if mf2 is None:
128 mf2 = self._buildstatusmanifest(s)
128 mf2 = self._buildstatusmanifest(s)
129
129
130 modified, added = [], []
130 modified, added = [], []
131 removed = []
131 removed = []
132 clean = []
132 clean = []
133 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
133 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
134 deletedset = set(deleted)
134 deletedset = set(deleted)
135 d = mf1.diff(mf2, match=match, clean=listclean)
135 d = mf1.diff(mf2, match=match, clean=listclean)
136 for fn, value in d.iteritems():
136 for fn, value in d.iteritems():
137 if fn in deletedset:
137 if fn in deletedset:
138 continue
138 continue
139 if value is None:
139 if value is None:
140 clean.append(fn)
140 clean.append(fn)
141 continue
141 continue
142 (node1, flag1), (node2, flag2) = value
142 (node1, flag1), (node2, flag2) = value
143 if node1 is None:
143 if node1 is None:
144 added.append(fn)
144 added.append(fn)
145 elif node2 is None:
145 elif node2 is None:
146 removed.append(fn)
146 removed.append(fn)
147 elif flag1 != flag2:
147 elif flag1 != flag2:
148 modified.append(fn)
148 modified.append(fn)
149 elif node2 not in wdirnodes:
149 elif node2 not in wdirnodes:
150 # When comparing files between two commits, we save time by
150 # When comparing files between two commits, we save time by
151 # not comparing the file contents when the nodeids differ.
151 # not comparing the file contents when the nodeids differ.
152 # Note that this means we incorrectly report a reverted change
152 # Note that this means we incorrectly report a reverted change
153 # to a file as a modification.
153 # to a file as a modification.
154 modified.append(fn)
154 modified.append(fn)
155 elif self[fn].cmp(other[fn]):
155 elif self[fn].cmp(other[fn]):
156 modified.append(fn)
156 modified.append(fn)
157 else:
157 else:
158 clean.append(fn)
158 clean.append(fn)
159
159
160 if removed:
160 if removed:
161 # need to filter files if they are already reported as removed
161 # need to filter files if they are already reported as removed
162 unknown = [fn for fn in unknown if fn not in mf1 and
162 unknown = [fn for fn in unknown if fn not in mf1 and
163 (not match or match(fn))]
163 (not match or match(fn))]
164 ignored = [fn for fn in ignored if fn not in mf1 and
164 ignored = [fn for fn in ignored if fn not in mf1 and
165 (not match or match(fn))]
165 (not match or match(fn))]
166 # if they're deleted, don't report them as removed
166 # if they're deleted, don't report them as removed
167 removed = [fn for fn in removed if fn not in deletedset]
167 removed = [fn for fn in removed if fn not in deletedset]
168
168
169 return scmutil.status(modified, added, removed, deleted, unknown,
169 return scmutil.status(modified, added, removed, deleted, unknown,
170 ignored, clean)
170 ignored, clean)
171
171
172 @propertycache
172 @propertycache
173 def substate(self):
173 def substate(self):
174 return subrepo.state(self, self._repo.ui)
174 return subrepo.state(self, self._repo.ui)
175
175
176 def subrev(self, subpath):
176 def subrev(self, subpath):
177 return self.substate[subpath][1]
177 return self.substate[subpath][1]
178
178
179 def rev(self):
179 def rev(self):
180 return self._rev
180 return self._rev
181 def node(self):
181 def node(self):
182 return self._node
182 return self._node
183 def hex(self):
183 def hex(self):
184 return hex(self.node())
184 return hex(self.node())
185 def manifest(self):
185 def manifest(self):
186 return self._manifest
186 return self._manifest
187 def manifestctx(self):
187 def manifestctx(self):
188 return self._manifestctx
188 return self._manifestctx
189 def repo(self):
189 def repo(self):
190 return self._repo
190 return self._repo
191 def phasestr(self):
191 def phasestr(self):
192 return phases.phasenames[self.phase()]
192 return phases.phasenames[self.phase()]
193 def mutable(self):
193 def mutable(self):
194 return self.phase() > phases.public
194 return self.phase() > phases.public
195
195
196 def getfileset(self, expr):
196 def getfileset(self, expr):
197 return fileset.getfileset(self, expr)
197 return fileset.getfileset(self, expr)
198
198
199 def obsolete(self):
199 def obsolete(self):
200 """True if the changeset is obsolete"""
200 """True if the changeset is obsolete"""
201 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
201 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
202
202
203 def extinct(self):
203 def extinct(self):
204 """True if the changeset is extinct"""
204 """True if the changeset is extinct"""
205 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
205 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
206
206
207 def unstable(self):
207 def unstable(self):
208 """True if the changeset is not obsolete but it's ancestor are"""
208 """True if the changeset is not obsolete but it's ancestor are"""
209 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
209 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
210
210
211 def bumped(self):
211 def bumped(self):
212 """True if the changeset try to be a successor of a public changeset
212 """True if the changeset try to be a successor of a public changeset
213
213
214 Only non-public and non-obsolete changesets may be bumped.
214 Only non-public and non-obsolete changesets may be bumped.
215 """
215 """
216 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
216 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
217
217
218 def divergent(self):
218 def divergent(self):
219 """Is a successors of a changeset with multiple possible successors set
219 """Is a successors of a changeset with multiple possible successors set
220
220
221 Only non-public and non-obsolete changesets may be divergent.
221 Only non-public and non-obsolete changesets may be divergent.
222 """
222 """
223 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
223 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
224
224
225 def troubled(self):
225 def troubled(self):
226 """True if the changeset is either unstable, bumped or divergent"""
226 """True if the changeset is either unstable, bumped or divergent"""
227 return self.unstable() or self.bumped() or self.divergent()
227 return self.unstable() or self.bumped() or self.divergent()
228
228
229 def troubles(self):
229 def troubles(self):
230 """return the list of troubles affecting this changesets.
230 """return the list of troubles affecting this changesets.
231
231
232 Troubles are returned as strings. possible values are:
232 Troubles are returned as strings. possible values are:
233 - unstable,
233 - unstable,
234 - bumped,
234 - bumped,
235 - divergent.
235 - divergent.
236 """
236 """
237 troubles = []
237 troubles = []
238 if self.unstable():
238 if self.unstable():
239 troubles.append('unstable')
239 troubles.append('unstable')
240 if self.bumped():
240 if self.bumped():
241 troubles.append('bumped')
241 troubles.append('bumped')
242 if self.divergent():
242 if self.divergent():
243 troubles.append('divergent')
243 troubles.append('divergent')
244 return troubles
244 return troubles
245
245
246 def parents(self):
246 def parents(self):
247 """return contexts for each parent changeset"""
247 """return contexts for each parent changeset"""
248 return self._parents
248 return self._parents
249
249
250 def p1(self):
250 def p1(self):
251 return self._parents[0]
251 return self._parents[0]
252
252
253 def p2(self):
253 def p2(self):
254 parents = self._parents
254 parents = self._parents
255 if len(parents) == 2:
255 if len(parents) == 2:
256 return parents[1]
256 return parents[1]
257 return changectx(self._repo, nullrev)
257 return changectx(self._repo, nullrev)
258
258
259 def _fileinfo(self, path):
259 def _fileinfo(self, path):
260 if '_manifest' in self.__dict__:
260 if '_manifest' in self.__dict__:
261 try:
261 try:
262 return self._manifest[path], self._manifest.flags(path)
262 return self._manifest[path], self._manifest.flags(path)
263 except KeyError:
263 except KeyError:
264 raise error.ManifestLookupError(self._node, path,
264 raise error.ManifestLookupError(self._node, path,
265 _('not found in manifest'))
265 _('not found in manifest'))
266 if '_manifestdelta' in self.__dict__ or path in self.files():
266 if '_manifestdelta' in self.__dict__ or path in self.files():
267 if path in self._manifestdelta:
267 if path in self._manifestdelta:
268 return (self._manifestdelta[path],
268 return (self._manifestdelta[path],
269 self._manifestdelta.flags(path))
269 self._manifestdelta.flags(path))
270 mfl = self._repo.manifestlog
270 mfl = self._repo.manifestlog
271 try:
271 try:
272 node, flag = mfl[self._changeset.manifest].find(path)
272 node, flag = mfl[self._changeset.manifest].find(path)
273 except KeyError:
273 except KeyError:
274 raise error.ManifestLookupError(self._node, path,
274 raise error.ManifestLookupError(self._node, path,
275 _('not found in manifest'))
275 _('not found in manifest'))
276
276
277 return node, flag
277 return node, flag
278
278
279 def filenode(self, path):
279 def filenode(self, path):
280 return self._fileinfo(path)[0]
280 return self._fileinfo(path)[0]
281
281
282 def flags(self, path):
282 def flags(self, path):
283 try:
283 try:
284 return self._fileinfo(path)[1]
284 return self._fileinfo(path)[1]
285 except error.LookupError:
285 except error.LookupError:
286 return ''
286 return ''
287
287
288 def sub(self, path, allowcreate=True):
288 def sub(self, path, allowcreate=True):
289 '''return a subrepo for the stored revision of path, never wdir()'''
289 '''return a subrepo for the stored revision of path, never wdir()'''
290 return subrepo.subrepo(self, path, allowcreate=allowcreate)
290 return subrepo.subrepo(self, path, allowcreate=allowcreate)
291
291
292 def nullsub(self, path, pctx):
292 def nullsub(self, path, pctx):
293 return subrepo.nullsubrepo(self, path, pctx)
293 return subrepo.nullsubrepo(self, path, pctx)
294
294
295 def workingsub(self, path):
295 def workingsub(self, path):
296 '''return a subrepo for the stored revision, or wdir if this is a wdir
296 '''return a subrepo for the stored revision, or wdir if this is a wdir
297 context.
297 context.
298 '''
298 '''
299 return subrepo.subrepo(self, path, allowwdir=True)
299 return subrepo.subrepo(self, path, allowwdir=True)
300
300
301 def match(self, pats=None, include=None, exclude=None, default='glob',
301 def match(self, pats=None, include=None, exclude=None, default='glob',
302 listsubrepos=False, badfn=None):
302 listsubrepos=False, badfn=None):
303 if pats is None:
303 if pats is None:
304 pats = []
304 pats = []
305 r = self._repo
305 r = self._repo
306 return matchmod.match(r.root, r.getcwd(), pats,
306 return matchmod.match(r.root, r.getcwd(), pats,
307 include, exclude, default,
307 include, exclude, default,
308 auditor=r.nofsauditor, ctx=self,
308 auditor=r.nofsauditor, ctx=self,
309 listsubrepos=listsubrepos, badfn=badfn)
309 listsubrepos=listsubrepos, badfn=badfn)
310
310
311 def diff(self, ctx2=None, match=None, **opts):
311 def diff(self, ctx2=None, match=None, **opts):
312 """Returns a diff generator for the given contexts and matcher"""
312 """Returns a diff generator for the given contexts and matcher"""
313 if ctx2 is None:
313 if ctx2 is None:
314 ctx2 = self.p1()
314 ctx2 = self.p1()
315 if ctx2 is not None:
315 if ctx2 is not None:
316 ctx2 = self._repo[ctx2]
316 ctx2 = self._repo[ctx2]
317 diffopts = patch.diffopts(self._repo.ui, opts)
317 diffopts = patch.diffopts(self._repo.ui, opts)
318 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
318 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
319
319
320 def dirs(self):
320 def dirs(self):
321 return self._manifest.dirs()
321 return self._manifest.dirs()
322
322
323 def hasdir(self, dir):
323 def hasdir(self, dir):
324 return self._manifest.hasdir(dir)
324 return self._manifest.hasdir(dir)
325
325
326 def dirty(self, missing=False, merge=True, branch=True):
326 def dirty(self, missing=False, merge=True, branch=True):
327 return False
327 return False
328
328
329 def status(self, other=None, match=None, listignored=False,
329 def status(self, other=None, match=None, listignored=False,
330 listclean=False, listunknown=False, listsubrepos=False):
330 listclean=False, listunknown=False, listsubrepos=False):
331 """return status of files between two nodes or node and working
331 """return status of files between two nodes or node and working
332 directory.
332 directory.
333
333
334 If other is None, compare this node with working directory.
334 If other is None, compare this node with working directory.
335
335
336 returns (modified, added, removed, deleted, unknown, ignored, clean)
336 returns (modified, added, removed, deleted, unknown, ignored, clean)
337 """
337 """
338
338
339 ctx1 = self
339 ctx1 = self
340 ctx2 = self._repo[other]
340 ctx2 = self._repo[other]
341
341
342 # This next code block is, admittedly, fragile logic that tests for
342 # This next code block is, admittedly, fragile logic that tests for
343 # reversing the contexts and wouldn't need to exist if it weren't for
343 # reversing the contexts and wouldn't need to exist if it weren't for
344 # the fast (and common) code path of comparing the working directory
344 # the fast (and common) code path of comparing the working directory
345 # with its first parent.
345 # with its first parent.
346 #
346 #
347 # What we're aiming for here is the ability to call:
347 # What we're aiming for here is the ability to call:
348 #
348 #
349 # workingctx.status(parentctx)
349 # workingctx.status(parentctx)
350 #
350 #
351 # If we always built the manifest for each context and compared those,
351 # If we always built the manifest for each context and compared those,
352 # then we'd be done. But the special case of the above call means we
352 # then we'd be done. But the special case of the above call means we
353 # just copy the manifest of the parent.
353 # just copy the manifest of the parent.
354 reversed = False
354 reversed = False
355 if (not isinstance(ctx1, changectx)
355 if (not isinstance(ctx1, changectx)
356 and isinstance(ctx2, changectx)):
356 and isinstance(ctx2, changectx)):
357 reversed = True
357 reversed = True
358 ctx1, ctx2 = ctx2, ctx1
358 ctx1, ctx2 = ctx2, ctx1
359
359
360 match = ctx2._matchstatus(ctx1, match)
360 match = ctx2._matchstatus(ctx1, match)
361 r = scmutil.status([], [], [], [], [], [], [])
361 r = scmutil.status([], [], [], [], [], [], [])
362 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
362 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
363 listunknown)
363 listunknown)
364
364
365 if reversed:
365 if reversed:
366 # Reverse added and removed. Clear deleted, unknown and ignored as
366 # Reverse added and removed. Clear deleted, unknown and ignored as
367 # these make no sense to reverse.
367 # these make no sense to reverse.
368 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
368 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
369 r.clean)
369 r.clean)
370
370
371 if listsubrepos:
371 if listsubrepos:
372 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
372 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
373 try:
373 try:
374 rev2 = ctx2.subrev(subpath)
374 rev2 = ctx2.subrev(subpath)
375 except KeyError:
375 except KeyError:
376 # A subrepo that existed in node1 was deleted between
376 # A subrepo that existed in node1 was deleted between
377 # node1 and node2 (inclusive). Thus, ctx2's substate
377 # node1 and node2 (inclusive). Thus, ctx2's substate
378 # won't contain that subpath. The best we can do ignore it.
378 # won't contain that subpath. The best we can do ignore it.
379 rev2 = None
379 rev2 = None
380 submatch = matchmod.subdirmatcher(subpath, match)
380 submatch = matchmod.subdirmatcher(subpath, match)
381 s = sub.status(rev2, match=submatch, ignored=listignored,
381 s = sub.status(rev2, match=submatch, ignored=listignored,
382 clean=listclean, unknown=listunknown,
382 clean=listclean, unknown=listunknown,
383 listsubrepos=True)
383 listsubrepos=True)
384 for rfiles, sfiles in zip(r, s):
384 for rfiles, sfiles in zip(r, s):
385 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
385 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
386
386
387 for l in r:
387 for l in r:
388 l.sort()
388 l.sort()
389
389
390 return r
390 return r
391
391
392
392
393 def makememctx(repo, parents, text, user, date, branch, files, store,
393 def makememctx(repo, parents, text, user, date, branch, files, store,
394 editor=None, extra=None):
394 editor=None, extra=None):
395 def getfilectx(repo, memctx, path):
395 def getfilectx(repo, memctx, path):
396 data, mode, copied = store.getfile(path)
396 data, mode, copied = store.getfile(path)
397 if data is None:
397 if data is None:
398 return None
398 return None
399 islink, isexec = mode
399 islink, isexec = mode
400 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
400 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
401 copied=copied, memctx=memctx)
401 copied=copied, memctx=memctx)
402 if extra is None:
402 if extra is None:
403 extra = {}
403 extra = {}
404 if branch:
404 if branch:
405 extra['branch'] = encoding.fromlocal(branch)
405 extra['branch'] = encoding.fromlocal(branch)
406 ctx = memctx(repo, parents, text, files, getfilectx, user,
406 ctx = memctx(repo, parents, text, files, getfilectx, user,
407 date, extra, editor)
407 date, extra, editor)
408 return ctx
408 return ctx
409
409
410 def _filterederror(repo, changeid):
411 """build an exception to be raised about a filtered changeid
412
413 This is extracted in a function to help extensions (eg: evolve) to
414 experiment with various message variants."""
415 if repo.filtername.startswith('visible'):
416 msg = _("hidden revision '%s'") % changeid
417 hint = _('use --hidden to access hidden revisions')
418 return error.FilteredRepoLookupError(msg, hint=hint)
419 msg = _("filtered revision '%s' (not in '%s' subset)")
420 msg %= (changeid, repo.filtername)
421 return error.FilteredRepoLookupError(msg)
422
410 class changectx(basectx):
423 class changectx(basectx):
411 """A changecontext object makes access to data related to a particular
424 """A changecontext object makes access to data related to a particular
412 changeset convenient. It represents a read-only context already present in
425 changeset convenient. It represents a read-only context already present in
413 the repo."""
426 the repo."""
414 def __init__(self, repo, changeid=''):
427 def __init__(self, repo, changeid=''):
415 """changeid is a revision number, node, or tag"""
428 """changeid is a revision number, node, or tag"""
416
429
417 # since basectx.__new__ already took care of copying the object, we
430 # since basectx.__new__ already took care of copying the object, we
418 # don't need to do anything in __init__, so we just exit here
431 # don't need to do anything in __init__, so we just exit here
419 if isinstance(changeid, basectx):
432 if isinstance(changeid, basectx):
420 return
433 return
421
434
422 if changeid == '':
435 if changeid == '':
423 changeid = '.'
436 changeid = '.'
424 self._repo = repo
437 self._repo = repo
425
438
426 try:
439 try:
427 if isinstance(changeid, int):
440 if isinstance(changeid, int):
428 self._node = repo.changelog.node(changeid)
441 self._node = repo.changelog.node(changeid)
429 self._rev = changeid
442 self._rev = changeid
430 return
443 return
431 if not pycompat.ispy3 and isinstance(changeid, long):
444 if not pycompat.ispy3 and isinstance(changeid, long):
432 changeid = str(changeid)
445 changeid = str(changeid)
433 if changeid == 'null':
446 if changeid == 'null':
434 self._node = nullid
447 self._node = nullid
435 self._rev = nullrev
448 self._rev = nullrev
436 return
449 return
437 if changeid == 'tip':
450 if changeid == 'tip':
438 self._node = repo.changelog.tip()
451 self._node = repo.changelog.tip()
439 self._rev = repo.changelog.rev(self._node)
452 self._rev = repo.changelog.rev(self._node)
440 return
453 return
441 if changeid == '.' or changeid == repo.dirstate.p1():
454 if changeid == '.' or changeid == repo.dirstate.p1():
442 # this is a hack to delay/avoid loading obsmarkers
455 # this is a hack to delay/avoid loading obsmarkers
443 # when we know that '.' won't be hidden
456 # when we know that '.' won't be hidden
444 self._node = repo.dirstate.p1()
457 self._node = repo.dirstate.p1()
445 self._rev = repo.unfiltered().changelog.rev(self._node)
458 self._rev = repo.unfiltered().changelog.rev(self._node)
446 return
459 return
447 if len(changeid) == 20:
460 if len(changeid) == 20:
448 try:
461 try:
449 self._node = changeid
462 self._node = changeid
450 self._rev = repo.changelog.rev(changeid)
463 self._rev = repo.changelog.rev(changeid)
451 return
464 return
452 except error.FilteredRepoLookupError:
465 except error.FilteredRepoLookupError:
453 raise
466 raise
454 except LookupError:
467 except LookupError:
455 pass
468 pass
456
469
457 try:
470 try:
458 r = int(changeid)
471 r = int(changeid)
459 if '%d' % r != changeid:
472 if '%d' % r != changeid:
460 raise ValueError
473 raise ValueError
461 l = len(repo.changelog)
474 l = len(repo.changelog)
462 if r < 0:
475 if r < 0:
463 r += l
476 r += l
464 if r < 0 or r >= l:
477 if r < 0 or r >= l:
465 raise ValueError
478 raise ValueError
466 self._rev = r
479 self._rev = r
467 self._node = repo.changelog.node(r)
480 self._node = repo.changelog.node(r)
468 return
481 return
469 except error.FilteredIndexError:
482 except error.FilteredIndexError:
470 raise
483 raise
471 except (ValueError, OverflowError, IndexError):
484 except (ValueError, OverflowError, IndexError):
472 pass
485 pass
473
486
474 if len(changeid) == 40:
487 if len(changeid) == 40:
475 try:
488 try:
476 self._node = bin(changeid)
489 self._node = bin(changeid)
477 self._rev = repo.changelog.rev(self._node)
490 self._rev = repo.changelog.rev(self._node)
478 return
491 return
479 except error.FilteredLookupError:
492 except error.FilteredLookupError:
480 raise
493 raise
481 except (TypeError, LookupError):
494 except (TypeError, LookupError):
482 pass
495 pass
483
496
484 # lookup bookmarks through the name interface
497 # lookup bookmarks through the name interface
485 try:
498 try:
486 self._node = repo.names.singlenode(repo, changeid)
499 self._node = repo.names.singlenode(repo, changeid)
487 self._rev = repo.changelog.rev(self._node)
500 self._rev = repo.changelog.rev(self._node)
488 return
501 return
489 except KeyError:
502 except KeyError:
490 pass
503 pass
491 except error.FilteredRepoLookupError:
504 except error.FilteredRepoLookupError:
492 raise
505 raise
493 except error.RepoLookupError:
506 except error.RepoLookupError:
494 pass
507 pass
495
508
496 self._node = repo.unfiltered().changelog._partialmatch(changeid)
509 self._node = repo.unfiltered().changelog._partialmatch(changeid)
497 if self._node is not None:
510 if self._node is not None:
498 self._rev = repo.changelog.rev(self._node)
511 self._rev = repo.changelog.rev(self._node)
499 return
512 return
500
513
501 # lookup failed
514 # lookup failed
502 # check if it might have come from damaged dirstate
515 # check if it might have come from damaged dirstate
503 #
516 #
504 # XXX we could avoid the unfiltered if we had a recognizable
517 # XXX we could avoid the unfiltered if we had a recognizable
505 # exception for filtered changeset access
518 # exception for filtered changeset access
506 if changeid in repo.unfiltered().dirstate.parents():
519 if changeid in repo.unfiltered().dirstate.parents():
507 msg = _("working directory has unknown parent '%s'!")
520 msg = _("working directory has unknown parent '%s'!")
508 raise error.Abort(msg % short(changeid))
521 raise error.Abort(msg % short(changeid))
509 try:
522 try:
510 if len(changeid) == 20 and nonascii(changeid):
523 if len(changeid) == 20 and nonascii(changeid):
511 changeid = hex(changeid)
524 changeid = hex(changeid)
512 except TypeError:
525 except TypeError:
513 pass
526 pass
514 except (error.FilteredIndexError, error.FilteredLookupError,
527 except (error.FilteredIndexError, error.FilteredLookupError,
515 error.FilteredRepoLookupError):
528 error.FilteredRepoLookupError):
516 if repo.filtername.startswith('visible'):
529 raise _filterederror(repo, changeid)
517 msg = _("hidden revision '%s'") % changeid
518 hint = _('use --hidden to access hidden revisions')
519 raise error.FilteredRepoLookupError(msg, hint=hint)
520 msg = _("filtered revision '%s' (not in '%s' subset)")
521 msg %= (changeid, repo.filtername)
522 raise error.FilteredRepoLookupError(msg)
523 except IndexError:
530 except IndexError:
524 pass
531 pass
525 raise error.RepoLookupError(
532 raise error.RepoLookupError(
526 _("unknown revision '%s'") % changeid)
533 _("unknown revision '%s'") % changeid)
527
534
528 def __hash__(self):
535 def __hash__(self):
529 try:
536 try:
530 return hash(self._rev)
537 return hash(self._rev)
531 except AttributeError:
538 except AttributeError:
532 return id(self)
539 return id(self)
533
540
534 def __nonzero__(self):
541 def __nonzero__(self):
535 return self._rev != nullrev
542 return self._rev != nullrev
536
543
537 __bool__ = __nonzero__
544 __bool__ = __nonzero__
538
545
539 @propertycache
546 @propertycache
540 def _changeset(self):
547 def _changeset(self):
541 return self._repo.changelog.changelogrevision(self.rev())
548 return self._repo.changelog.changelogrevision(self.rev())
542
549
543 @propertycache
550 @propertycache
544 def _manifest(self):
551 def _manifest(self):
545 return self._manifestctx.read()
552 return self._manifestctx.read()
546
553
547 @propertycache
554 @propertycache
548 def _manifestctx(self):
555 def _manifestctx(self):
549 return self._repo.manifestlog[self._changeset.manifest]
556 return self._repo.manifestlog[self._changeset.manifest]
550
557
551 @propertycache
558 @propertycache
552 def _manifestdelta(self):
559 def _manifestdelta(self):
553 return self._manifestctx.readdelta()
560 return self._manifestctx.readdelta()
554
561
555 @propertycache
562 @propertycache
556 def _parents(self):
563 def _parents(self):
557 repo = self._repo
564 repo = self._repo
558 p1, p2 = repo.changelog.parentrevs(self._rev)
565 p1, p2 = repo.changelog.parentrevs(self._rev)
559 if p2 == nullrev:
566 if p2 == nullrev:
560 return [changectx(repo, p1)]
567 return [changectx(repo, p1)]
561 return [changectx(repo, p1), changectx(repo, p2)]
568 return [changectx(repo, p1), changectx(repo, p2)]
562
569
563 def changeset(self):
570 def changeset(self):
564 c = self._changeset
571 c = self._changeset
565 return (
572 return (
566 c.manifest,
573 c.manifest,
567 c.user,
574 c.user,
568 c.date,
575 c.date,
569 c.files,
576 c.files,
570 c.description,
577 c.description,
571 c.extra,
578 c.extra,
572 )
579 )
573 def manifestnode(self):
580 def manifestnode(self):
574 return self._changeset.manifest
581 return self._changeset.manifest
575
582
576 def user(self):
583 def user(self):
577 return self._changeset.user
584 return self._changeset.user
578 def date(self):
585 def date(self):
579 return self._changeset.date
586 return self._changeset.date
580 def files(self):
587 def files(self):
581 return self._changeset.files
588 return self._changeset.files
582 def description(self):
589 def description(self):
583 return self._changeset.description
590 return self._changeset.description
584 def branch(self):
591 def branch(self):
585 return encoding.tolocal(self._changeset.extra.get("branch"))
592 return encoding.tolocal(self._changeset.extra.get("branch"))
586 def closesbranch(self):
593 def closesbranch(self):
587 return 'close' in self._changeset.extra
594 return 'close' in self._changeset.extra
588 def extra(self):
595 def extra(self):
589 return self._changeset.extra
596 return self._changeset.extra
590 def tags(self):
597 def tags(self):
591 return self._repo.nodetags(self._node)
598 return self._repo.nodetags(self._node)
592 def bookmarks(self):
599 def bookmarks(self):
593 return self._repo.nodebookmarks(self._node)
600 return self._repo.nodebookmarks(self._node)
594 def phase(self):
601 def phase(self):
595 return self._repo._phasecache.phase(self._repo, self._rev)
602 return self._repo._phasecache.phase(self._repo, self._rev)
596 def hidden(self):
603 def hidden(self):
597 return self._rev in repoview.filterrevs(self._repo, 'visible')
604 return self._rev in repoview.filterrevs(self._repo, 'visible')
598
605
599 def children(self):
606 def children(self):
600 """return contexts for each child changeset"""
607 """return contexts for each child changeset"""
601 c = self._repo.changelog.children(self._node)
608 c = self._repo.changelog.children(self._node)
602 return [changectx(self._repo, x) for x in c]
609 return [changectx(self._repo, x) for x in c]
603
610
604 def ancestors(self):
611 def ancestors(self):
605 for a in self._repo.changelog.ancestors([self._rev]):
612 for a in self._repo.changelog.ancestors([self._rev]):
606 yield changectx(self._repo, a)
613 yield changectx(self._repo, a)
607
614
608 def descendants(self):
615 def descendants(self):
609 for d in self._repo.changelog.descendants([self._rev]):
616 for d in self._repo.changelog.descendants([self._rev]):
610 yield changectx(self._repo, d)
617 yield changectx(self._repo, d)
611
618
612 def filectx(self, path, fileid=None, filelog=None):
619 def filectx(self, path, fileid=None, filelog=None):
613 """get a file context from this changeset"""
620 """get a file context from this changeset"""
614 if fileid is None:
621 if fileid is None:
615 fileid = self.filenode(path)
622 fileid = self.filenode(path)
616 return filectx(self._repo, path, fileid=fileid,
623 return filectx(self._repo, path, fileid=fileid,
617 changectx=self, filelog=filelog)
624 changectx=self, filelog=filelog)
618
625
619 def ancestor(self, c2, warn=False):
626 def ancestor(self, c2, warn=False):
620 """return the "best" ancestor context of self and c2
627 """return the "best" ancestor context of self and c2
621
628
622 If there are multiple candidates, it will show a message and check
629 If there are multiple candidates, it will show a message and check
623 merge.preferancestor configuration before falling back to the
630 merge.preferancestor configuration before falling back to the
624 revlog ancestor."""
631 revlog ancestor."""
625 # deal with workingctxs
632 # deal with workingctxs
626 n2 = c2._node
633 n2 = c2._node
627 if n2 is None:
634 if n2 is None:
628 n2 = c2._parents[0]._node
635 n2 = c2._parents[0]._node
629 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
636 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
630 if not cahs:
637 if not cahs:
631 anc = nullid
638 anc = nullid
632 elif len(cahs) == 1:
639 elif len(cahs) == 1:
633 anc = cahs[0]
640 anc = cahs[0]
634 else:
641 else:
635 # experimental config: merge.preferancestor
642 # experimental config: merge.preferancestor
636 for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
643 for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
637 try:
644 try:
638 ctx = changectx(self._repo, r)
645 ctx = changectx(self._repo, r)
639 except error.RepoLookupError:
646 except error.RepoLookupError:
640 continue
647 continue
641 anc = ctx.node()
648 anc = ctx.node()
642 if anc in cahs:
649 if anc in cahs:
643 break
650 break
644 else:
651 else:
645 anc = self._repo.changelog.ancestor(self._node, n2)
652 anc = self._repo.changelog.ancestor(self._node, n2)
646 if warn:
653 if warn:
647 self._repo.ui.status(
654 self._repo.ui.status(
648 (_("note: using %s as ancestor of %s and %s\n") %
655 (_("note: using %s as ancestor of %s and %s\n") %
649 (short(anc), short(self._node), short(n2))) +
656 (short(anc), short(self._node), short(n2))) +
650 ''.join(_(" alternatively, use --config "
657 ''.join(_(" alternatively, use --config "
651 "merge.preferancestor=%s\n") %
658 "merge.preferancestor=%s\n") %
652 short(n) for n in sorted(cahs) if n != anc))
659 short(n) for n in sorted(cahs) if n != anc))
653 return changectx(self._repo, anc)
660 return changectx(self._repo, anc)
654
661
655 def descendant(self, other):
662 def descendant(self, other):
656 """True if other is descendant of this changeset"""
663 """True if other is descendant of this changeset"""
657 return self._repo.changelog.descendant(self._rev, other._rev)
664 return self._repo.changelog.descendant(self._rev, other._rev)
658
665
659 def walk(self, match):
666 def walk(self, match):
660 '''Generates matching file names.'''
667 '''Generates matching file names.'''
661
668
662 # Wrap match.bad method to have message with nodeid
669 # Wrap match.bad method to have message with nodeid
663 def bad(fn, msg):
670 def bad(fn, msg):
664 # The manifest doesn't know about subrepos, so don't complain about
671 # The manifest doesn't know about subrepos, so don't complain about
665 # paths into valid subrepos.
672 # paths into valid subrepos.
666 if any(fn == s or fn.startswith(s + '/')
673 if any(fn == s or fn.startswith(s + '/')
667 for s in self.substate):
674 for s in self.substate):
668 return
675 return
669 match.bad(fn, _('no such file in rev %s') % self)
676 match.bad(fn, _('no such file in rev %s') % self)
670
677
671 m = matchmod.badmatch(match, bad)
678 m = matchmod.badmatch(match, bad)
672 return self._manifest.walk(m)
679 return self._manifest.walk(m)
673
680
674 def matches(self, match):
681 def matches(self, match):
675 return self.walk(match)
682 return self.walk(match)
676
683
677 class basefilectx(object):
684 class basefilectx(object):
678 """A filecontext object represents the common logic for its children:
685 """A filecontext object represents the common logic for its children:
679 filectx: read-only access to a filerevision that is already present
686 filectx: read-only access to a filerevision that is already present
680 in the repo,
687 in the repo,
681 workingfilectx: a filecontext that represents files from the working
688 workingfilectx: a filecontext that represents files from the working
682 directory,
689 directory,
683 memfilectx: a filecontext that represents files in-memory."""
690 memfilectx: a filecontext that represents files in-memory."""
684 def __new__(cls, repo, path, *args, **kwargs):
691 def __new__(cls, repo, path, *args, **kwargs):
685 return super(basefilectx, cls).__new__(cls)
692 return super(basefilectx, cls).__new__(cls)
686
693
687 @propertycache
694 @propertycache
688 def _filelog(self):
695 def _filelog(self):
689 return self._repo.file(self._path)
696 return self._repo.file(self._path)
690
697
691 @propertycache
698 @propertycache
692 def _changeid(self):
699 def _changeid(self):
693 if '_changeid' in self.__dict__:
700 if '_changeid' in self.__dict__:
694 return self._changeid
701 return self._changeid
695 elif '_changectx' in self.__dict__:
702 elif '_changectx' in self.__dict__:
696 return self._changectx.rev()
703 return self._changectx.rev()
697 elif '_descendantrev' in self.__dict__:
704 elif '_descendantrev' in self.__dict__:
698 # this file context was created from a revision with a known
705 # this file context was created from a revision with a known
699 # descendant, we can (lazily) correct for linkrev aliases
706 # descendant, we can (lazily) correct for linkrev aliases
700 return self._adjustlinkrev(self._descendantrev)
707 return self._adjustlinkrev(self._descendantrev)
701 else:
708 else:
702 return self._filelog.linkrev(self._filerev)
709 return self._filelog.linkrev(self._filerev)
703
710
704 @propertycache
711 @propertycache
705 def _filenode(self):
712 def _filenode(self):
706 if '_fileid' in self.__dict__:
713 if '_fileid' in self.__dict__:
707 return self._filelog.lookup(self._fileid)
714 return self._filelog.lookup(self._fileid)
708 else:
715 else:
709 return self._changectx.filenode(self._path)
716 return self._changectx.filenode(self._path)
710
717
711 @propertycache
718 @propertycache
712 def _filerev(self):
719 def _filerev(self):
713 return self._filelog.rev(self._filenode)
720 return self._filelog.rev(self._filenode)
714
721
715 @propertycache
722 @propertycache
716 def _repopath(self):
723 def _repopath(self):
717 return self._path
724 return self._path
718
725
719 def __nonzero__(self):
726 def __nonzero__(self):
720 try:
727 try:
721 self._filenode
728 self._filenode
722 return True
729 return True
723 except error.LookupError:
730 except error.LookupError:
724 # file is missing
731 # file is missing
725 return False
732 return False
726
733
727 __bool__ = __nonzero__
734 __bool__ = __nonzero__
728
735
729 def __str__(self):
736 def __str__(self):
730 try:
737 try:
731 return "%s@%s" % (self.path(), self._changectx)
738 return "%s@%s" % (self.path(), self._changectx)
732 except error.LookupError:
739 except error.LookupError:
733 return "%s@???" % self.path()
740 return "%s@???" % self.path()
734
741
735 def __repr__(self):
742 def __repr__(self):
736 return "<%s %s>" % (type(self).__name__, str(self))
743 return "<%s %s>" % (type(self).__name__, str(self))
737
744
738 def __hash__(self):
745 def __hash__(self):
739 try:
746 try:
740 return hash((self._path, self._filenode))
747 return hash((self._path, self._filenode))
741 except AttributeError:
748 except AttributeError:
742 return id(self)
749 return id(self)
743
750
744 def __eq__(self, other):
751 def __eq__(self, other):
745 try:
752 try:
746 return (type(self) == type(other) and self._path == other._path
753 return (type(self) == type(other) and self._path == other._path
747 and self._filenode == other._filenode)
754 and self._filenode == other._filenode)
748 except AttributeError:
755 except AttributeError:
749 return False
756 return False
750
757
751 def __ne__(self, other):
758 def __ne__(self, other):
752 return not (self == other)
759 return not (self == other)
753
760
754 def filerev(self):
761 def filerev(self):
755 return self._filerev
762 return self._filerev
756 def filenode(self):
763 def filenode(self):
757 return self._filenode
764 return self._filenode
758 def flags(self):
765 def flags(self):
759 return self._changectx.flags(self._path)
766 return self._changectx.flags(self._path)
760 def filelog(self):
767 def filelog(self):
761 return self._filelog
768 return self._filelog
762 def rev(self):
769 def rev(self):
763 return self._changeid
770 return self._changeid
764 def linkrev(self):
771 def linkrev(self):
765 return self._filelog.linkrev(self._filerev)
772 return self._filelog.linkrev(self._filerev)
766 def node(self):
773 def node(self):
767 return self._changectx.node()
774 return self._changectx.node()
768 def hex(self):
775 def hex(self):
769 return self._changectx.hex()
776 return self._changectx.hex()
770 def user(self):
777 def user(self):
771 return self._changectx.user()
778 return self._changectx.user()
772 def date(self):
779 def date(self):
773 return self._changectx.date()
780 return self._changectx.date()
774 def files(self):
781 def files(self):
775 return self._changectx.files()
782 return self._changectx.files()
776 def description(self):
783 def description(self):
777 return self._changectx.description()
784 return self._changectx.description()
778 def branch(self):
785 def branch(self):
779 return self._changectx.branch()
786 return self._changectx.branch()
780 def extra(self):
787 def extra(self):
781 return self._changectx.extra()
788 return self._changectx.extra()
782 def phase(self):
789 def phase(self):
783 return self._changectx.phase()
790 return self._changectx.phase()
784 def phasestr(self):
791 def phasestr(self):
785 return self._changectx.phasestr()
792 return self._changectx.phasestr()
786 def manifest(self):
793 def manifest(self):
787 return self._changectx.manifest()
794 return self._changectx.manifest()
788 def changectx(self):
795 def changectx(self):
789 return self._changectx
796 return self._changectx
790 def repo(self):
797 def repo(self):
791 return self._repo
798 return self._repo
792
799
793 def path(self):
800 def path(self):
794 return self._path
801 return self._path
795
802
796 def isbinary(self):
803 def isbinary(self):
797 try:
804 try:
798 return util.binary(self.data())
805 return util.binary(self.data())
799 except IOError:
806 except IOError:
800 return False
807 return False
801 def isexec(self):
808 def isexec(self):
802 return 'x' in self.flags()
809 return 'x' in self.flags()
803 def islink(self):
810 def islink(self):
804 return 'l' in self.flags()
811 return 'l' in self.flags()
805
812
806 def isabsent(self):
813 def isabsent(self):
807 """whether this filectx represents a file not in self._changectx
814 """whether this filectx represents a file not in self._changectx
808
815
809 This is mainly for merge code to detect change/delete conflicts. This is
816 This is mainly for merge code to detect change/delete conflicts. This is
810 expected to be True for all subclasses of basectx."""
817 expected to be True for all subclasses of basectx."""
811 return False
818 return False
812
819
813 _customcmp = False
820 _customcmp = False
814 def cmp(self, fctx):
821 def cmp(self, fctx):
815 """compare with other file context
822 """compare with other file context
816
823
817 returns True if different than fctx.
824 returns True if different than fctx.
818 """
825 """
819 if fctx._customcmp:
826 if fctx._customcmp:
820 return fctx.cmp(self)
827 return fctx.cmp(self)
821
828
822 if (fctx._filenode is None
829 if (fctx._filenode is None
823 and (self._repo._encodefilterpats
830 and (self._repo._encodefilterpats
824 # if file data starts with '\1\n', empty metadata block is
831 # if file data starts with '\1\n', empty metadata block is
825 # prepended, which adds 4 bytes to filelog.size().
832 # prepended, which adds 4 bytes to filelog.size().
826 or self.size() - 4 == fctx.size())
833 or self.size() - 4 == fctx.size())
827 or self.size() == fctx.size()):
834 or self.size() == fctx.size()):
828 return self._filelog.cmp(self._filenode, fctx.data())
835 return self._filelog.cmp(self._filenode, fctx.data())
829
836
830 return True
837 return True
831
838
832 def _adjustlinkrev(self, srcrev, inclusive=False):
839 def _adjustlinkrev(self, srcrev, inclusive=False):
833 """return the first ancestor of <srcrev> introducing <fnode>
840 """return the first ancestor of <srcrev> introducing <fnode>
834
841
835 If the linkrev of the file revision does not point to an ancestor of
842 If the linkrev of the file revision does not point to an ancestor of
836 srcrev, we'll walk down the ancestors until we find one introducing
843 srcrev, we'll walk down the ancestors until we find one introducing
837 this file revision.
844 this file revision.
838
845
839 :srcrev: the changeset revision we search ancestors from
846 :srcrev: the changeset revision we search ancestors from
840 :inclusive: if true, the src revision will also be checked
847 :inclusive: if true, the src revision will also be checked
841 """
848 """
842 repo = self._repo
849 repo = self._repo
843 cl = repo.unfiltered().changelog
850 cl = repo.unfiltered().changelog
844 mfl = repo.manifestlog
851 mfl = repo.manifestlog
845 # fetch the linkrev
852 # fetch the linkrev
846 lkr = self.linkrev()
853 lkr = self.linkrev()
847 # hack to reuse ancestor computation when searching for renames
854 # hack to reuse ancestor computation when searching for renames
848 memberanc = getattr(self, '_ancestrycontext', None)
855 memberanc = getattr(self, '_ancestrycontext', None)
849 iteranc = None
856 iteranc = None
850 if srcrev is None:
857 if srcrev is None:
851 # wctx case, used by workingfilectx during mergecopy
858 # wctx case, used by workingfilectx during mergecopy
852 revs = [p.rev() for p in self._repo[None].parents()]
859 revs = [p.rev() for p in self._repo[None].parents()]
853 inclusive = True # we skipped the real (revless) source
860 inclusive = True # we skipped the real (revless) source
854 else:
861 else:
855 revs = [srcrev]
862 revs = [srcrev]
856 if memberanc is None:
863 if memberanc is None:
857 memberanc = iteranc = cl.ancestors(revs, lkr,
864 memberanc = iteranc = cl.ancestors(revs, lkr,
858 inclusive=inclusive)
865 inclusive=inclusive)
859 # check if this linkrev is an ancestor of srcrev
866 # check if this linkrev is an ancestor of srcrev
860 if lkr not in memberanc:
867 if lkr not in memberanc:
861 if iteranc is None:
868 if iteranc is None:
862 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
869 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
863 fnode = self._filenode
870 fnode = self._filenode
864 path = self._path
871 path = self._path
865 for a in iteranc:
872 for a in iteranc:
866 ac = cl.read(a) # get changeset data (we avoid object creation)
873 ac = cl.read(a) # get changeset data (we avoid object creation)
867 if path in ac[3]: # checking the 'files' field.
874 if path in ac[3]: # checking the 'files' field.
868 # The file has been touched, check if the content is
875 # The file has been touched, check if the content is
869 # similar to the one we search for.
876 # similar to the one we search for.
870 if fnode == mfl[ac[0]].readfast().get(path):
877 if fnode == mfl[ac[0]].readfast().get(path):
871 return a
878 return a
872 # In theory, we should never get out of that loop without a result.
879 # In theory, we should never get out of that loop without a result.
873 # But if manifest uses a buggy file revision (not children of the
880 # But if manifest uses a buggy file revision (not children of the
874 # one it replaces) we could. Such a buggy situation will likely
881 # one it replaces) we could. Such a buggy situation will likely
875 # result is crash somewhere else at to some point.
882 # result is crash somewhere else at to some point.
876 return lkr
883 return lkr
877
884
878 def introrev(self):
885 def introrev(self):
879 """return the rev of the changeset which introduced this file revision
886 """return the rev of the changeset which introduced this file revision
880
887
881 This method is different from linkrev because it take into account the
888 This method is different from linkrev because it take into account the
882 changeset the filectx was created from. It ensures the returned
889 changeset the filectx was created from. It ensures the returned
883 revision is one of its ancestors. This prevents bugs from
890 revision is one of its ancestors. This prevents bugs from
884 'linkrev-shadowing' when a file revision is used by multiple
891 'linkrev-shadowing' when a file revision is used by multiple
885 changesets.
892 changesets.
886 """
893 """
887 lkr = self.linkrev()
894 lkr = self.linkrev()
888 attrs = vars(self)
895 attrs = vars(self)
889 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
896 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
890 if noctx or self.rev() == lkr:
897 if noctx or self.rev() == lkr:
891 return self.linkrev()
898 return self.linkrev()
892 return self._adjustlinkrev(self.rev(), inclusive=True)
899 return self._adjustlinkrev(self.rev(), inclusive=True)
893
900
894 def _parentfilectx(self, path, fileid, filelog):
901 def _parentfilectx(self, path, fileid, filelog):
895 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
902 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
896 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
903 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
897 if '_changeid' in vars(self) or '_changectx' in vars(self):
904 if '_changeid' in vars(self) or '_changectx' in vars(self):
898 # If self is associated with a changeset (probably explicitly
905 # If self is associated with a changeset (probably explicitly
899 # fed), ensure the created filectx is associated with a
906 # fed), ensure the created filectx is associated with a
900 # changeset that is an ancestor of self.changectx.
907 # changeset that is an ancestor of self.changectx.
901 # This lets us later use _adjustlinkrev to get a correct link.
908 # This lets us later use _adjustlinkrev to get a correct link.
902 fctx._descendantrev = self.rev()
909 fctx._descendantrev = self.rev()
903 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
910 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
904 elif '_descendantrev' in vars(self):
911 elif '_descendantrev' in vars(self):
905 # Otherwise propagate _descendantrev if we have one associated.
912 # Otherwise propagate _descendantrev if we have one associated.
906 fctx._descendantrev = self._descendantrev
913 fctx._descendantrev = self._descendantrev
907 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
914 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
908 return fctx
915 return fctx
909
916
910 def parents(self):
917 def parents(self):
911 _path = self._path
918 _path = self._path
912 fl = self._filelog
919 fl = self._filelog
913 parents = self._filelog.parents(self._filenode)
920 parents = self._filelog.parents(self._filenode)
914 pl = [(_path, node, fl) for node in parents if node != nullid]
921 pl = [(_path, node, fl) for node in parents if node != nullid]
915
922
916 r = fl.renamed(self._filenode)
923 r = fl.renamed(self._filenode)
917 if r:
924 if r:
918 # - In the simple rename case, both parents are nullid, pl is empty.
925 # - In the simple rename case, both parents are nullid, pl is empty.
919 # - In case of merge, only one of the parents is nullid and should
926 # - In case of merge, only one of the parents is nullid and should
920 # be replaced with the rename information. This parent is -always-
927 # be replaced with the rename information. This parent is -always-
921 # the first one.
928 # the first one.
922 #
929 #
923 # As nullid parents have always been filtered out in the previous list
930 # As nullid parents have always been filtered out in the previous list
924 # comprehension, inserting at index 0 will always result in replacing the
931 # comprehension, inserting at index 0 will always result in replacing the
925 # first nullid parent with the rename information.
932 # first nullid parent with the rename information.
926 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
933 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
927
934
928 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
935 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
929
936
930 def p1(self):
937 def p1(self):
931 return self.parents()[0]
938 return self.parents()[0]
932
939
933 def p2(self):
940 def p2(self):
934 p = self.parents()
941 p = self.parents()
935 if len(p) == 2:
942 if len(p) == 2:
936 return p[1]
943 return p[1]
937 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
944 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
938
945
939 def annotate(self, follow=False, linenumber=False, diffopts=None):
946 def annotate(self, follow=False, linenumber=False, diffopts=None):
940 '''returns a list of tuples of ((ctx, number), line) for each line
947 '''returns a list of tuples of ((ctx, number), line) for each line
941 in the file, where ctx is the filectx of the node where
948 in the file, where ctx is the filectx of the node where
942 that line was last changed; if the linenumber parameter is true, number is
949 that line was last changed; if the linenumber parameter is true, number is
943 the line number of its first appearance in the managed file; otherwise,
950 the line number of its first appearance in the managed file; otherwise,
944 number has a fixed value of False.
951 number has a fixed value of False.
945 '''
952 '''
946
953
947 def lines(text):
954 def lines(text):
948 if text.endswith("\n"):
955 if text.endswith("\n"):
949 return text.count("\n")
956 return text.count("\n")
950 return text.count("\n") + int(bool(text))
957 return text.count("\n") + int(bool(text))
951
958
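# For reference, a few values of the helper above: lines("a\nb\n") == 2,
# lines("a\nb") == 2 and lines("") == 0, i.e. a missing trailing newline
# still counts the final partial line and empty text has no lines.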
952 if linenumber:
959 if linenumber:
953 def decorate(text, rev):
960 def decorate(text, rev):
954 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
961 return ([(rev, i) for i in xrange(1, lines(text) + 1)], text)
955 else:
962 else:
956 def decorate(text, rev):
963 def decorate(text, rev):
957 return ([(rev, False)] * lines(text), text)
964 return ([(rev, False)] * lines(text), text)
958
965
959 def pair(parent, child):
966 def pair(parent, child):
960 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts)
967 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts)
961 for (a1, a2, b1, b2), t in blocks:
968 for (a1, a2, b1, b2), t in blocks:
962 # Changed blocks ('!') or blocks made only of blank lines ('~')
969 # Changed blocks ('!') or blocks made only of blank lines ('~')
963 # belong to the child.
970 # belong to the child.
964 if t == '=':
971 if t == '=':
965 child[0][b1:b2] = parent[0][a1:a2]
972 child[0][b1:b2] = parent[0][a1:a2]
966 return child
973 return child
967
974
968 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
975 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
969
976
970 def parents(f):
977 def parents(f):
971 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
978 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
972 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
979 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
973 # from the topmost introrev (= srcrev) down to p.linkrev() if it
980 # from the topmost introrev (= srcrev) down to p.linkrev() if it
974 # isn't an ancestor of the srcrev.
981 # isn't an ancestor of the srcrev.
975 f._changeid
982 f._changeid
976 pl = f.parents()
983 pl = f.parents()
977
984
978 # Don't return renamed parents if we aren't following.
985 # Don't return renamed parents if we aren't following.
979 if not follow:
986 if not follow:
980 pl = [p for p in pl if p.path() == f.path()]
987 pl = [p for p in pl if p.path() == f.path()]
981
988
982 # renamed filectx won't have a filelog yet, so set it
989 # renamed filectx won't have a filelog yet, so set it
983 # from the cache to save time
990 # from the cache to save time
984 for p in pl:
991 for p in pl:
985 if '_filelog' not in p.__dict__:
992 if '_filelog' not in p.__dict__:
986 p._filelog = getlog(p.path())
993 p._filelog = getlog(p.path())
987
994
988 return pl
995 return pl
989
996
990 # use linkrev to find the first changeset where self appeared
997 # use linkrev to find the first changeset where self appeared
991 base = self
998 base = self
992 introrev = self.introrev()
999 introrev = self.introrev()
993 if self.rev() != introrev:
1000 if self.rev() != introrev:
994 base = self.filectx(self.filenode(), changeid=introrev)
1001 base = self.filectx(self.filenode(), changeid=introrev)
995 if getattr(base, '_ancestrycontext', None) is None:
1002 if getattr(base, '_ancestrycontext', None) is None:
996 cl = self._repo.changelog
1003 cl = self._repo.changelog
997 if introrev is None:
1004 if introrev is None:
998 # wctx is not inclusive, but works because _ancestrycontext
1005 # wctx is not inclusive, but works because _ancestrycontext
999 # is used to test filelog revisions
1006 # is used to test filelog revisions
1000 ac = cl.ancestors([p.rev() for p in base.parents()],
1007 ac = cl.ancestors([p.rev() for p in base.parents()],
1001 inclusive=True)
1008 inclusive=True)
1002 else:
1009 else:
1003 ac = cl.ancestors([introrev], inclusive=True)
1010 ac = cl.ancestors([introrev], inclusive=True)
1004 base._ancestrycontext = ac
1011 base._ancestrycontext = ac
1005
1012
1006 # This algorithm would prefer to be recursive, but Python is a
1013 # This algorithm would prefer to be recursive, but Python is a
1007 # bit recursion-hostile. Instead we do an iterative
1014 # bit recursion-hostile. Instead we do an iterative
1008 # depth-first search.
1015 # depth-first search.
1009
1016
1010 # 1st DFS pre-calculates pcache and needed
1017 # 1st DFS pre-calculates pcache and needed
1011 visit = [base]
1018 visit = [base]
1012 pcache = {}
1019 pcache = {}
1013 needed = {base: 1}
1020 needed = {base: 1}
1014 while visit:
1021 while visit:
1015 f = visit.pop()
1022 f = visit.pop()
1016 if f in pcache:
1023 if f in pcache:
1017 continue
1024 continue
1018 pl = parents(f)
1025 pl = parents(f)
1019 pcache[f] = pl
1026 pcache[f] = pl
1020 for p in pl:
1027 for p in pl:
1021 needed[p] = needed.get(p, 0) + 1
1028 needed[p] = needed.get(p, 0) + 1
1022 if p not in pcache:
1029 if p not in pcache:
1023 visit.append(p)
1030 visit.append(p)
1024
1031
1025 # 2nd DFS does the actual annotate
1032 # 2nd DFS does the actual annotate
1026 visit[:] = [base]
1033 visit[:] = [base]
1027 hist = {}
1034 hist = {}
1028 while visit:
1035 while visit:
1029 f = visit[-1]
1036 f = visit[-1]
1030 if f in hist:
1037 if f in hist:
1031 visit.pop()
1038 visit.pop()
1032 continue
1039 continue
1033
1040
1034 ready = True
1041 ready = True
1035 pl = pcache[f]
1042 pl = pcache[f]
1036 for p in pl:
1043 for p in pl:
1037 if p not in hist:
1044 if p not in hist:
1038 ready = False
1045 ready = False
1039 visit.append(p)
1046 visit.append(p)
1040 if ready:
1047 if ready:
1041 visit.pop()
1048 visit.pop()
1042 curr = decorate(f.data(), f)
1049 curr = decorate(f.data(), f)
1043 for p in pl:
1050 for p in pl:
1044 curr = pair(hist[p], curr)
1051 curr = pair(hist[p], curr)
1045 if needed[p] == 1:
1052 if needed[p] == 1:
1046 del hist[p]
1053 del hist[p]
1047 del needed[p]
1054 del needed[p]
1048 else:
1055 else:
1049 needed[p] -= 1
1056 needed[p] -= 1
1050
1057
1051 hist[f] = curr
1058 hist[f] = curr
1052 del pcache[f]
1059 del pcache[f]
1053
1060
1054 return zip(hist[base][0], hist[base][1].splitlines(True))
1061 return zip(hist[base][0], hist[base][1].splitlines(True))
1055
1062
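# A minimal usage sketch for annotate() (assuming a local `repo` object and
# a tracked file "a.txt"; names are illustrative only):
#
#     fctx = repo['tip']['a.txt']
#     for (src, lineno), line in fctx.annotate(linenumber=True):
#         print src.rev(), lineno, line,
#
# Each line is paired with the filectx that last changed it and, because
# linenumber=True, with the line number it had when first introduced.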
1056 def ancestors(self, followfirst=False):
1063 def ancestors(self, followfirst=False):
1057 visit = {}
1064 visit = {}
1058 c = self
1065 c = self
1059 if followfirst:
1066 if followfirst:
1060 cut = 1
1067 cut = 1
1061 else:
1068 else:
1062 cut = None
1069 cut = None
1063
1070
1064 while True:
1071 while True:
1065 for parent in c.parents()[:cut]:
1072 for parent in c.parents()[:cut]:
1066 visit[(parent.linkrev(), parent.filenode())] = parent
1073 visit[(parent.linkrev(), parent.filenode())] = parent
1067 if not visit:
1074 if not visit:
1068 break
1075 break
1069 c = visit.pop(max(visit))
1076 c = visit.pop(max(visit))
1070 yield c
1077 yield c
1071
1078
1072 class filectx(basefilectx):
1079 class filectx(basefilectx):
1073 """A filecontext object makes access to data related to a particular
1080 """A filecontext object makes access to data related to a particular
1074 filerevision convenient."""
1081 filerevision convenient."""
1075 def __init__(self, repo, path, changeid=None, fileid=None,
1082 def __init__(self, repo, path, changeid=None, fileid=None,
1076 filelog=None, changectx=None):
1083 filelog=None, changectx=None):
1077 """changeid can be a changeset revision, node, or tag.
1084 """changeid can be a changeset revision, node, or tag.
1078 fileid can be a file revision or node."""
1085 fileid can be a file revision or node."""
1079 self._repo = repo
1086 self._repo = repo
1080 self._path = path
1087 self._path = path
1081
1088
1082 assert (changeid is not None
1089 assert (changeid is not None
1083 or fileid is not None
1090 or fileid is not None
1084 or changectx is not None), \
1091 or changectx is not None), \
1085 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1092 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1086 % (changeid, fileid, changectx))
1093 % (changeid, fileid, changectx))
1087
1094
1088 if filelog is not None:
1095 if filelog is not None:
1089 self._filelog = filelog
1096 self._filelog = filelog
1090
1097
1091 if changeid is not None:
1098 if changeid is not None:
1092 self._changeid = changeid
1099 self._changeid = changeid
1093 if changectx is not None:
1100 if changectx is not None:
1094 self._changectx = changectx
1101 self._changectx = changectx
1095 if fileid is not None:
1102 if fileid is not None:
1096 self._fileid = fileid
1103 self._fileid = fileid
1097
1104
1098 @propertycache
1105 @propertycache
1099 def _changectx(self):
1106 def _changectx(self):
1100 try:
1107 try:
1101 return changectx(self._repo, self._changeid)
1108 return changectx(self._repo, self._changeid)
1102 except error.FilteredRepoLookupError:
1109 except error.FilteredRepoLookupError:
1103 # Linkrev may point to any revision in the repository. When the
1110 # Linkrev may point to any revision in the repository. When the
1104 # repository is filtered this may lead to `filectx` trying to build
1111 # repository is filtered this may lead to `filectx` trying to build
1105 # `changectx` for a filtered revision. In such a case we fall back to
1112 # `changectx` for a filtered revision. In such a case we fall back to
1106 # creating `changectx` on the unfiltered version of the repository.
1113 # creating `changectx` on the unfiltered version of the repository.
1107 # This fallback should not be an issue because `changectx` from
1114 # This fallback should not be an issue because `changectx` from
1108 # `filectx` are not used in complex operations that care about
1115 # `filectx` are not used in complex operations that care about
1109 # filtering.
1116 # filtering.
1110 #
1117 #
1111 # This fallback is a cheap and dirty fix that prevents several
1118 # This fallback is a cheap and dirty fix that prevents several
1112 # crashes. It does not ensure the behavior is correct. However the
1119 # crashes. It does not ensure the behavior is correct. However the
1113 # behavior was not correct before filtering either and "incorrect
1120 # behavior was not correct before filtering either and "incorrect
1114 # behavior" is seen as better than "crash".
1121 # behavior" is seen as better than "crash".
1115 #
1122 #
1116 # Linkrevs have several serious issues with filtering that are
1123 # Linkrevs have several serious issues with filtering that are
1117 # complicated to solve. Proper handling of the issue here should be
1124 # complicated to solve. Proper handling of the issue here should be
1118 # considered when solving the linkrev issues is on the table.
1125 # considered when solving the linkrev issues is on the table.
1119 return changectx(self._repo.unfiltered(), self._changeid)
1126 return changectx(self._repo.unfiltered(), self._changeid)
1120
1127
1121 def filectx(self, fileid, changeid=None):
1128 def filectx(self, fileid, changeid=None):
1122 '''opens an arbitrary revision of the file without
1129 '''opens an arbitrary revision of the file without
1123 opening a new filelog'''
1130 opening a new filelog'''
1124 return filectx(self._repo, self._path, fileid=fileid,
1131 return filectx(self._repo, self._path, fileid=fileid,
1125 filelog=self._filelog, changeid=changeid)
1132 filelog=self._filelog, changeid=changeid)
1126
1133
1127 def rawdata(self):
1134 def rawdata(self):
1128 return self._filelog.revision(self._filenode, raw=True)
1135 return self._filelog.revision(self._filenode, raw=True)
1129
1136
1130 def data(self):
1137 def data(self):
1131 try:
1138 try:
1132 return self._filelog.read(self._filenode)
1139 return self._filelog.read(self._filenode)
1133 except error.CensoredNodeError:
1140 except error.CensoredNodeError:
1134 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1141 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1135 return ""
1142 return ""
1136 raise error.Abort(_("censored node: %s") % short(self._filenode),
1143 raise error.Abort(_("censored node: %s") % short(self._filenode),
1137 hint=_("set censor.policy to ignore errors"))
1144 hint=_("set censor.policy to ignore errors"))
1138
1145
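# The abort above can be downgraded to returning empty content through
# configuration; a sketch of the hgrc section as read by the
# ui.config("censor", "policy", ...) call above:
#
#     [censor]
#     policy = ignore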
1139 def size(self):
1146 def size(self):
1140 return self._filelog.size(self._filerev)
1147 return self._filelog.size(self._filerev)
1141
1148
1142 def renamed(self):
1149 def renamed(self):
1143 """check if file was actually renamed in this changeset revision
1150 """check if file was actually renamed in this changeset revision
1144
1151
1145 If a rename is logged in the file revision, we report the copy for the
1152 If a rename is logged in the file revision, we report the copy for the
1146 changeset only if the file revision's linkrev points back to the changeset
1153 changeset only if the file revision's linkrev points back to the changeset
1147 in question or both changeset parents contain different file revisions.
1154 in question or both changeset parents contain different file revisions.
1148 """
1155 """
1149
1156
1150 renamed = self._filelog.renamed(self._filenode)
1157 renamed = self._filelog.renamed(self._filenode)
1151 if not renamed:
1158 if not renamed:
1152 return renamed
1159 return renamed
1153
1160
1154 if self.rev() == self.linkrev():
1161 if self.rev() == self.linkrev():
1155 return renamed
1162 return renamed
1156
1163
1157 name = self.path()
1164 name = self.path()
1158 fnode = self._filenode
1165 fnode = self._filenode
1159 for p in self._changectx.parents():
1166 for p in self._changectx.parents():
1160 try:
1167 try:
1161 if fnode == p.filenode(name):
1168 if fnode == p.filenode(name):
1162 return None
1169 return None
1163 except error.LookupError:
1170 except error.LookupError:
1164 pass
1171 pass
1165 return renamed
1172 return renamed
1166
1173
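# Usage sketch (hypothetical path): renamed() returns the copy source as a
# (path, filenode) pair, or a false value when no copy should be reported
# for this changeset.
#
#     r = repo['tip']['b.txt'].renamed()
#     if r:
#         oldpath, oldnode = r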
1167 def children(self):
1174 def children(self):
1168 # hard for renames
1175 # hard for renames
1169 c = self._filelog.children(self._filenode)
1176 c = self._filelog.children(self._filenode)
1170 return [filectx(self._repo, self._path, fileid=x,
1177 return [filectx(self._repo, self._path, fileid=x,
1171 filelog=self._filelog) for x in c]
1178 filelog=self._filelog) for x in c]
1172
1179
1173 def _changesrange(fctx1, fctx2, linerange2, diffopts):
1180 def _changesrange(fctx1, fctx2, linerange2, diffopts):
1174 """Return `(diffinrange, linerange1)` where `diffinrange` is True
1181 """Return `(diffinrange, linerange1)` where `diffinrange` is True
1175 if diff from fctx2 to fctx1 has changes in linerange2 and
1182 if diff from fctx2 to fctx1 has changes in linerange2 and
1176 `linerange1` is the new line range for fctx1.
1183 `linerange1` is the new line range for fctx1.
1177 """
1184 """
1178 blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
1185 blocks = mdiff.allblocks(fctx1.data(), fctx2.data(), diffopts)
1179 filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
1186 filteredblocks, linerange1 = mdiff.blocksinrange(blocks, linerange2)
1180 diffinrange = any(stype == '!' for _, stype in filteredblocks)
1187 diffinrange = any(stype == '!' for _, stype in filteredblocks)
1181 return diffinrange, linerange1
1188 return diffinrange, linerange1
1182
1189
1183 def blockancestors(fctx, fromline, toline, followfirst=False):
1190 def blockancestors(fctx, fromline, toline, followfirst=False):
1184 """Yield ancestors of `fctx` with respect to the block of lines within
1191 """Yield ancestors of `fctx` with respect to the block of lines within
1185 `fromline`-`toline` range.
1192 `fromline`-`toline` range.
1186 """
1193 """
1187 diffopts = patch.diffopts(fctx._repo.ui)
1194 diffopts = patch.diffopts(fctx._repo.ui)
1188 visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
1195 visit = {(fctx.linkrev(), fctx.filenode()): (fctx, (fromline, toline))}
1189 while visit:
1196 while visit:
1190 c, linerange2 = visit.pop(max(visit))
1197 c, linerange2 = visit.pop(max(visit))
1191 pl = c.parents()
1198 pl = c.parents()
1192 if followfirst:
1199 if followfirst:
1193 pl = pl[:1]
1200 pl = pl[:1]
1194 if not pl:
1201 if not pl:
1195 # The block originates from the initial revision.
1202 # The block originates from the initial revision.
1196 yield c, linerange2
1203 yield c, linerange2
1197 continue
1204 continue
1198 inrange = False
1205 inrange = False
1199 for p in pl:
1206 for p in pl:
1200 inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
1207 inrangep, linerange1 = _changesrange(p, c, linerange2, diffopts)
1201 inrange = inrange or inrangep
1208 inrange = inrange or inrangep
1202 if linerange1[0] == linerange1[1]:
1209 if linerange1[0] == linerange1[1]:
1203 # Parent's linerange is empty, meaning that the block got
1210 # Parent's linerange is empty, meaning that the block got
1204 # introduced in this revision; no need to go further in this
1211 # introduced in this revision; no need to go further in this
1205 # branch.
1212 # branch.
1206 continue
1213 continue
1207 visit[p.linkrev(), p.filenode()] = p, linerange1
1214 visit[p.linkrev(), p.filenode()] = p, linerange1
1208 if inrange:
1215 if inrange:
1209 yield c, linerange2
1216 yield c, linerange2
1210
1217
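# A minimal usage sketch (hypothetical file name and line range); this is
# the machinery used by, for example, the followlines() revset:
#
#     fctx = repo['tip']['a.txt']
#     for actx, (fromline, toline) in blockancestors(fctx, 10, 20):
#         print actx.rev(), fromline, toline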
1211 def blockdescendants(fctx, fromline, toline):
1218 def blockdescendants(fctx, fromline, toline):
1212 """Yield descendants of `fctx` with respect to the block of lines within
1219 """Yield descendants of `fctx` with respect to the block of lines within
1213 `fromline`-`toline` range.
1220 `fromline`-`toline` range.
1214 """
1221 """
1215 # First possibly yield 'fctx' if it has changes in range with respect to
1222 # First possibly yield 'fctx' if it has changes in range with respect to
1216 # its parents.
1223 # its parents.
1217 try:
1224 try:
1218 c, linerange1 = next(blockancestors(fctx, fromline, toline))
1225 c, linerange1 = next(blockancestors(fctx, fromline, toline))
1219 except StopIteration:
1226 except StopIteration:
1220 pass
1227 pass
1221 else:
1228 else:
1222 if c == fctx:
1229 if c == fctx:
1223 yield c, linerange1
1230 yield c, linerange1
1224
1231
1225 diffopts = patch.diffopts(fctx._repo.ui)
1232 diffopts = patch.diffopts(fctx._repo.ui)
1226 fl = fctx.filelog()
1233 fl = fctx.filelog()
1227 seen = {fctx.filerev(): (fctx, (fromline, toline))}
1234 seen = {fctx.filerev(): (fctx, (fromline, toline))}
1228 for i in fl.descendants([fctx.filerev()]):
1235 for i in fl.descendants([fctx.filerev()]):
1229 c = fctx.filectx(i)
1236 c = fctx.filectx(i)
1230 inrange = False
1237 inrange = False
1231 for x in fl.parentrevs(i):
1238 for x in fl.parentrevs(i):
1232 try:
1239 try:
1233 p, linerange2 = seen[x]
1240 p, linerange2 = seen[x]
1234 except KeyError:
1241 except KeyError:
1235 # nullrev or other branch
1242 # nullrev or other branch
1236 continue
1243 continue
1237 inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
1244 inrangep, linerange1 = _changesrange(c, p, linerange2, diffopts)
1238 inrange = inrange or inrangep
1245 inrange = inrange or inrangep
1239 # If revision 'i' has been seen (it's a merge), we assume that its
1246 # If revision 'i' has been seen (it's a merge), we assume that its
1240 # line range is the same independently of which parent was used
1247 # line range is the same independently of which parent was used
1241 # to compute it.
1248 # to compute it.
1242 assert i not in seen or seen[i][1] == linerange1, (
1249 assert i not in seen or seen[i][1] == linerange1, (
1243 'computed line range for %s is not consistent between '
1250 'computed line range for %s is not consistent between '
1244 'ancestor branches' % c)
1251 'ancestor branches' % c)
1245 seen[i] = c, linerange1
1252 seen[i] = c, linerange1
1246 if inrange:
1253 if inrange:
1247 yield c, linerange1
1254 yield c, linerange1
1248
1255
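# Symmetric sketch to the one after blockancestors() (hypothetical names):
# start from a filectx in an older revision and walk toward descendants
# whose diffs touch the tracked block.
#
#     old = repo[rev]['a.txt']
#     for dctx, linerange in blockdescendants(old, fromline, toline):
#         print dctx.rev(), linerange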
1249 class committablectx(basectx):
1256 class committablectx(basectx):
1250 """A committablectx object provides common functionality for a context that
1257 """A committablectx object provides common functionality for a context that
1251 wants the ability to commit, e.g. workingctx or memctx."""
1258 wants the ability to commit, e.g. workingctx or memctx."""
1252 def __init__(self, repo, text="", user=None, date=None, extra=None,
1259 def __init__(self, repo, text="", user=None, date=None, extra=None,
1253 changes=None):
1260 changes=None):
1254 self._repo = repo
1261 self._repo = repo
1255 self._rev = None
1262 self._rev = None
1256 self._node = None
1263 self._node = None
1257 self._text = text
1264 self._text = text
1258 if date:
1265 if date:
1259 self._date = util.parsedate(date)
1266 self._date = util.parsedate(date)
1260 if user:
1267 if user:
1261 self._user = user
1268 self._user = user
1262 if changes:
1269 if changes:
1263 self._status = changes
1270 self._status = changes
1264
1271
1265 self._extra = {}
1272 self._extra = {}
1266 if extra:
1273 if extra:
1267 self._extra = extra.copy()
1274 self._extra = extra.copy()
1268 if 'branch' not in self._extra:
1275 if 'branch' not in self._extra:
1269 try:
1276 try:
1270 branch = encoding.fromlocal(self._repo.dirstate.branch())
1277 branch = encoding.fromlocal(self._repo.dirstate.branch())
1271 except UnicodeDecodeError:
1278 except UnicodeDecodeError:
1272 raise error.Abort(_('branch name not in UTF-8!'))
1279 raise error.Abort(_('branch name not in UTF-8!'))
1273 self._extra['branch'] = branch
1280 self._extra['branch'] = branch
1274 if self._extra['branch'] == '':
1281 if self._extra['branch'] == '':
1275 self._extra['branch'] = 'default'
1282 self._extra['branch'] = 'default'
1276
1283
1277 def __str__(self):
1284 def __str__(self):
1278 return str(self._parents[0]) + "+"
1285 return str(self._parents[0]) + "+"
1279
1286
1280 def __nonzero__(self):
1287 def __nonzero__(self):
1281 return True
1288 return True
1282
1289
1283 __bool__ = __nonzero__
1290 __bool__ = __nonzero__
1284
1291
1285 def _buildflagfunc(self):
1292 def _buildflagfunc(self):
1286 # Create a fallback function for getting file flags when the
1293 # Create a fallback function for getting file flags when the
1287 # filesystem doesn't support them
1294 # filesystem doesn't support them
1288
1295
1289 copiesget = self._repo.dirstate.copies().get
1296 copiesget = self._repo.dirstate.copies().get
1290 parents = self.parents()
1297 parents = self.parents()
1291 if len(parents) < 2:
1298 if len(parents) < 2:
1292 # when we have one parent, it's easy: copy from parent
1299 # when we have one parent, it's easy: copy from parent
1293 man = parents[0].manifest()
1300 man = parents[0].manifest()
1294 def func(f):
1301 def func(f):
1295 f = copiesget(f, f)
1302 f = copiesget(f, f)
1296 return man.flags(f)
1303 return man.flags(f)
1297 else:
1304 else:
1298 # merges are tricky: we try to reconstruct the unstored
1305 # merges are tricky: we try to reconstruct the unstored
1299 # result from the merge (issue1802)
1306 # result from the merge (issue1802)
1300 p1, p2 = parents
1307 p1, p2 = parents
1301 pa = p1.ancestor(p2)
1308 pa = p1.ancestor(p2)
1302 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1309 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1303
1310
1304 def func(f):
1311 def func(f):
1305 f = copiesget(f, f) # may be wrong for merges with copies
1312 f = copiesget(f, f) # may be wrong for merges with copies
1306 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1313 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1307 if fl1 == fl2:
1314 if fl1 == fl2:
1308 return fl1
1315 return fl1
1309 if fl1 == fla:
1316 if fl1 == fla:
1310 return fl2
1317 return fl2
1311 if fl2 == fla:
1318 if fl2 == fla:
1312 return fl1
1319 return fl1
1313 return '' # punt for conflicts
1320 return '' # punt for conflicts
1314
1321
1315 return func
1322 return func
1316
1323
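# A worked example of the three-way flag merge above: with fl1='', fl2='x'
# and fla='' (only p2 touched the flag) the result is 'x'; with fl1='l',
# fl2='x' and fla='' no two values agree, so the conflict punts to ''.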
1317 @propertycache
1324 @propertycache
1318 def _flagfunc(self):
1325 def _flagfunc(self):
1319 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1326 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1320
1327
1321 @propertycache
1328 @propertycache
1322 def _status(self):
1329 def _status(self):
1323 return self._repo.status()
1330 return self._repo.status()
1324
1331
1325 @propertycache
1332 @propertycache
1326 def _user(self):
1333 def _user(self):
1327 return self._repo.ui.username()
1334 return self._repo.ui.username()
1328
1335
1329 @propertycache
1336 @propertycache
1330 def _date(self):
1337 def _date(self):
1331 return util.makedate()
1338 return util.makedate()
1332
1339
1333 def subrev(self, subpath):
1340 def subrev(self, subpath):
1334 return None
1341 return None
1335
1342
1336 def manifestnode(self):
1343 def manifestnode(self):
1337 return None
1344 return None
1338 def user(self):
1345 def user(self):
1339 return self._user or self._repo.ui.username()
1346 return self._user or self._repo.ui.username()
1340 def date(self):
1347 def date(self):
1341 return self._date
1348 return self._date
1342 def description(self):
1349 def description(self):
1343 return self._text
1350 return self._text
1344 def files(self):
1351 def files(self):
1345 return sorted(self._status.modified + self._status.added +
1352 return sorted(self._status.modified + self._status.added +
1346 self._status.removed)
1353 self._status.removed)
1347
1354
1348 def modified(self):
1355 def modified(self):
1349 return self._status.modified
1356 return self._status.modified
1350 def added(self):
1357 def added(self):
1351 return self._status.added
1358 return self._status.added
1352 def removed(self):
1359 def removed(self):
1353 return self._status.removed
1360 return self._status.removed
1354 def deleted(self):
1361 def deleted(self):
1355 return self._status.deleted
1362 return self._status.deleted
1356 def branch(self):
1363 def branch(self):
1357 return encoding.tolocal(self._extra['branch'])
1364 return encoding.tolocal(self._extra['branch'])
1358 def closesbranch(self):
1365 def closesbranch(self):
1359 return 'close' in self._extra
1366 return 'close' in self._extra
1360 def extra(self):
1367 def extra(self):
1361 return self._extra
1368 return self._extra
1362
1369
1363 def tags(self):
1370 def tags(self):
1364 return []
1371 return []
1365
1372
1366 def bookmarks(self):
1373 def bookmarks(self):
1367 b = []
1374 b = []
1368 for p in self.parents():
1375 for p in self.parents():
1369 b.extend(p.bookmarks())
1376 b.extend(p.bookmarks())
1370 return b
1377 return b
1371
1378
1372 def phase(self):
1379 def phase(self):
1373 phase = phases.draft # default phase to draft
1380 phase = phases.draft # default phase to draft
1374 for p in self.parents():
1381 for p in self.parents():
1375 phase = max(phase, p.phase())
1382 phase = max(phase, p.phase())
1376 return phase
1383 return phase
1377
1384
1378 def hidden(self):
1385 def hidden(self):
1379 return False
1386 return False
1380
1387
1381 def children(self):
1388 def children(self):
1382 return []
1389 return []
1383
1390
1384 def flags(self, path):
1391 def flags(self, path):
1385 if '_manifest' in self.__dict__:
1392 if '_manifest' in self.__dict__:
1386 try:
1393 try:
1387 return self._manifest.flags(path)
1394 return self._manifest.flags(path)
1388 except KeyError:
1395 except KeyError:
1389 return ''
1396 return ''
1390
1397
1391 try:
1398 try:
1392 return self._flagfunc(path)
1399 return self._flagfunc(path)
1393 except OSError:
1400 except OSError:
1394 return ''
1401 return ''
1395
1402
1396 def ancestor(self, c2):
1403 def ancestor(self, c2):
1397 """return the "best" ancestor context of self and c2"""
1404 """return the "best" ancestor context of self and c2"""
1398 return self._parents[0].ancestor(c2) # punt on two parents for now
1405 return self._parents[0].ancestor(c2) # punt on two parents for now
1399
1406
1400 def walk(self, match):
1407 def walk(self, match):
1401 '''Generates matching file names.'''
1408 '''Generates matching file names.'''
1402 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1409 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1403 True, False))
1410 True, False))
1404
1411
1405 def matches(self, match):
1412 def matches(self, match):
1406 return sorted(self._repo.dirstate.matches(match))
1413 return sorted(self._repo.dirstate.matches(match))
1407
1414
1408 def ancestors(self):
1415 def ancestors(self):
1409 for p in self._parents:
1416 for p in self._parents:
1410 yield p
1417 yield p
1411 for a in self._repo.changelog.ancestors(
1418 for a in self._repo.changelog.ancestors(
1412 [p.rev() for p in self._parents]):
1419 [p.rev() for p in self._parents]):
1413 yield changectx(self._repo, a)
1420 yield changectx(self._repo, a)
1414
1421
1415 def markcommitted(self, node):
1422 def markcommitted(self, node):
1416 """Perform post-commit cleanup necessary after committing this ctx
1423 """Perform post-commit cleanup necessary after committing this ctx
1417
1424
1418 Specifically, this updates the backing stores that this working context
1425 Specifically, this updates the backing stores that this working context
1419 wraps to record the fact that the changes represented by this
1426 wraps to record the fact that the changes represented by this
1420 workingctx have been committed. For example, it marks
1427 workingctx have been committed. For example, it marks
1421 modified and added files as normal in the dirstate.
1428 modified and added files as normal in the dirstate.
1422
1429
1423 """
1430 """
1424
1431
1425 self._repo.dirstate.beginparentchange()
1432 self._repo.dirstate.beginparentchange()
1426 for f in self.modified() + self.added():
1433 for f in self.modified() + self.added():
1427 self._repo.dirstate.normal(f)
1434 self._repo.dirstate.normal(f)
1428 for f in self.removed():
1435 for f in self.removed():
1429 self._repo.dirstate.drop(f)
1436 self._repo.dirstate.drop(f)
1430 self._repo.dirstate.setparents(node)
1437 self._repo.dirstate.setparents(node)
1431 self._repo.dirstate.endparentchange()
1438 self._repo.dirstate.endparentchange()
1432
1439
1433 # write changes out explicitly, because nesting wlock at
1440 # write changes out explicitly, because nesting wlock at
1434 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1441 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1435 # from immediately doing so for subsequent changing files
1442 # from immediately doing so for subsequent changing files
1436 self._repo.dirstate.write(self._repo.currenttransaction())
1443 self._repo.dirstate.write(self._repo.currenttransaction())
1437
1444
1438 class workingctx(committablectx):
1445 class workingctx(committablectx):
1439 """A workingctx object makes access to data related to
1446 """A workingctx object makes access to data related to
1440 the current working directory convenient.
1447 the current working directory convenient.
1441 date - any valid date string or (unixtime, offset), or None.
1448 date - any valid date string or (unixtime, offset), or None.
1442 user - username string, or None.
1449 user - username string, or None.
1443 extra - a dictionary of extra values, or None.
1450 extra - a dictionary of extra values, or None.
1444 changes - a list of file lists as returned by localrepo.status()
1451 changes - a list of file lists as returned by localrepo.status()
1445 or None to use the repository status.
1452 or None to use the repository status.
1446 """
1453 """
1447 def __init__(self, repo, text="", user=None, date=None, extra=None,
1454 def __init__(self, repo, text="", user=None, date=None, extra=None,
1448 changes=None):
1455 changes=None):
1449 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1456 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1450
1457
1451 def __iter__(self):
1458 def __iter__(self):
1452 d = self._repo.dirstate
1459 d = self._repo.dirstate
1453 for f in d:
1460 for f in d:
1454 if d[f] != 'r':
1461 if d[f] != 'r':
1455 yield f
1462 yield f
1456
1463
1457 def __contains__(self, key):
1464 def __contains__(self, key):
1458 return self._repo.dirstate[key] not in "?r"
1465 return self._repo.dirstate[key] not in "?r"
1459
1466
1460 def hex(self):
1467 def hex(self):
1461 return hex(wdirid)
1468 return hex(wdirid)
1462
1469
1463 @propertycache
1470 @propertycache
1464 def _parents(self):
1471 def _parents(self):
1465 p = self._repo.dirstate.parents()
1472 p = self._repo.dirstate.parents()
1466 if p[1] == nullid:
1473 if p[1] == nullid:
1467 p = p[:-1]
1474 p = p[:-1]
1468 return [changectx(self._repo, x) for x in p]
1475 return [changectx(self._repo, x) for x in p]
1469
1476
1470 def filectx(self, path, filelog=None):
1477 def filectx(self, path, filelog=None):
1471 """get a file context from the working directory"""
1478 """get a file context from the working directory"""
1472 return workingfilectx(self._repo, path, workingctx=self,
1479 return workingfilectx(self._repo, path, workingctx=self,
1473 filelog=filelog)
1480 filelog=filelog)
1474
1481
1475 def dirty(self, missing=False, merge=True, branch=True):
1482 def dirty(self, missing=False, merge=True, branch=True):
1476 "check whether a working directory is modified"
1483 "check whether a working directory is modified"
1477 # check subrepos first
1484 # check subrepos first
1478 for s in sorted(self.substate):
1485 for s in sorted(self.substate):
1479 if self.sub(s).dirty():
1486 if self.sub(s).dirty():
1480 return True
1487 return True
1481 # check current working dir
1488 # check current working dir
1482 return ((merge and self.p2()) or
1489 return ((merge and self.p2()) or
1483 (branch and self.branch() != self.p1().branch()) or
1490 (branch and self.branch() != self.p1().branch()) or
1484 self.modified() or self.added() or self.removed() or
1491 self.modified() or self.added() or self.removed() or
1485 (missing and self.deleted()))
1492 (missing and self.deleted()))
1486
1493
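# Usage sketch: repo[None] is the working directory context, so a quick
# "is there anything uncommitted (including deleted files)?" check is
#
#     if repo[None].dirty(missing=True):
#         ...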
1487 def add(self, list, prefix=""):
1494 def add(self, list, prefix=""):
1488 join = lambda f: os.path.join(prefix, f)
1495 join = lambda f: os.path.join(prefix, f)
1489 with self._repo.wlock():
1496 with self._repo.wlock():
1490 ui, ds = self._repo.ui, self._repo.dirstate
1497 ui, ds = self._repo.ui, self._repo.dirstate
1491 rejected = []
1498 rejected = []
1492 lstat = self._repo.wvfs.lstat
1499 lstat = self._repo.wvfs.lstat
1493 for f in list:
1500 for f in list:
1494 scmutil.checkportable(ui, join(f))
1501 scmutil.checkportable(ui, join(f))
1495 try:
1502 try:
1496 st = lstat(f)
1503 st = lstat(f)
1497 except OSError:
1504 except OSError:
1498 ui.warn(_("%s does not exist!\n") % join(f))
1505 ui.warn(_("%s does not exist!\n") % join(f))
1499 rejected.append(f)
1506 rejected.append(f)
1500 continue
1507 continue
1501 if st.st_size > 10000000:
1508 if st.st_size > 10000000:
1502 ui.warn(_("%s: up to %d MB of RAM may be required "
1509 ui.warn(_("%s: up to %d MB of RAM may be required "
1503 "to manage this file\n"
1510 "to manage this file\n"
1504 "(use 'hg revert %s' to cancel the "
1511 "(use 'hg revert %s' to cancel the "
1505 "pending addition)\n")
1512 "pending addition)\n")
1506 % (f, 3 * st.st_size // 1000000, join(f)))
1513 % (f, 3 * st.st_size // 1000000, join(f)))
1507 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1514 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1508 ui.warn(_("%s not added: only files and symlinks "
1515 ui.warn(_("%s not added: only files and symlinks "
1509 "supported currently\n") % join(f))
1516 "supported currently\n") % join(f))
1510 rejected.append(f)
1517 rejected.append(f)
1511 elif ds[f] in 'amn':
1518 elif ds[f] in 'amn':
1512 ui.warn(_("%s already tracked!\n") % join(f))
1519 ui.warn(_("%s already tracked!\n") % join(f))
1513 elif ds[f] == 'r':
1520 elif ds[f] == 'r':
1514 ds.normallookup(f)
1521 ds.normallookup(f)
1515 else:
1522 else:
1516 ds.add(f)
1523 ds.add(f)
1517 return rejected
1524 return rejected
1518
1525
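# Worked example for the large-file warning above: a file with st.st_size
# of 50000000 bytes crosses the 10000000-byte threshold and the message
# reports "up to 150 MB of RAM", i.e. 3 * 50000000 // 1000000.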
1519 def forget(self, files, prefix=""):
1526 def forget(self, files, prefix=""):
1520 join = lambda f: os.path.join(prefix, f)
1527 join = lambda f: os.path.join(prefix, f)
1521 with self._repo.wlock():
1528 with self._repo.wlock():
1522 rejected = []
1529 rejected = []
1523 for f in files:
1530 for f in files:
1524 if f not in self._repo.dirstate:
1531 if f not in self._repo.dirstate:
1525 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1532 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1526 rejected.append(f)
1533 rejected.append(f)
1527 elif self._repo.dirstate[f] != 'a':
1534 elif self._repo.dirstate[f] != 'a':
1528 self._repo.dirstate.remove(f)
1535 self._repo.dirstate.remove(f)
1529 else:
1536 else:
1530 self._repo.dirstate.drop(f)
1537 self._repo.dirstate.drop(f)
1531 return rejected
1538 return rejected
1532
1539
1533 def undelete(self, list):
1540 def undelete(self, list):
1534 pctxs = self.parents()
1541 pctxs = self.parents()
1535 with self._repo.wlock():
1542 with self._repo.wlock():
1536 for f in list:
1543 for f in list:
1537 if self._repo.dirstate[f] != 'r':
1544 if self._repo.dirstate[f] != 'r':
1538 self._repo.ui.warn(_("%s not removed!\n") % f)
1545 self._repo.ui.warn(_("%s not removed!\n") % f)
1539 else:
1546 else:
1540 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1547 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1541 t = fctx.data()
1548 t = fctx.data()
1542 self._repo.wwrite(f, t, fctx.flags())
1549 self._repo.wwrite(f, t, fctx.flags())
1543 self._repo.dirstate.normal(f)
1550 self._repo.dirstate.normal(f)
1544
1551
1545 def copy(self, source, dest):
1552 def copy(self, source, dest):
1546 try:
1553 try:
1547 st = self._repo.wvfs.lstat(dest)
1554 st = self._repo.wvfs.lstat(dest)
1548 except OSError as err:
1555 except OSError as err:
1549 if err.errno != errno.ENOENT:
1556 if err.errno != errno.ENOENT:
1550 raise
1557 raise
1551 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1558 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1552 return
1559 return
1553 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1560 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1554 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1561 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1555 "symbolic link\n") % dest)
1562 "symbolic link\n") % dest)
1556 else:
1563 else:
1557 with self._repo.wlock():
1564 with self._repo.wlock():
1558 if self._repo.dirstate[dest] in '?':
1565 if self._repo.dirstate[dest] in '?':
1559 self._repo.dirstate.add(dest)
1566 self._repo.dirstate.add(dest)
1560 elif self._repo.dirstate[dest] in 'r':
1567 elif self._repo.dirstate[dest] in 'r':
1561 self._repo.dirstate.normallookup(dest)
1568 self._repo.dirstate.normallookup(dest)
1562 self._repo.dirstate.copy(source, dest)
1569 self._repo.dirstate.copy(source, dest)
1563
1570
1564 def match(self, pats=None, include=None, exclude=None, default='glob',
1571 def match(self, pats=None, include=None, exclude=None, default='glob',
1565 listsubrepos=False, badfn=None):
1572 listsubrepos=False, badfn=None):
1566 if pats is None:
1573 if pats is None:
1567 pats = []
1574 pats = []
1568 r = self._repo
1575 r = self._repo
1569
1576
1570 # Only a case-insensitive filesystem needs magic to translate user input
1577 # Only a case-insensitive filesystem needs magic to translate user input
1571 # to actual case in the filesystem.
1578 # to actual case in the filesystem.
1572 matcherfunc = matchmod.match
1579 matcherfunc = matchmod.match
1573 if not util.fscasesensitive(r.root):
1580 if not util.fscasesensitive(r.root):
1574 matcherfunc = matchmod.icasefsmatcher
1581 matcherfunc = matchmod.icasefsmatcher
1575 return matcherfunc(r.root, r.getcwd(), pats,
1582 return matcherfunc(r.root, r.getcwd(), pats,
1576 include, exclude, default,
1583 include, exclude, default,
1577 auditor=r.auditor, ctx=self,
1584 auditor=r.auditor, ctx=self,
1578 listsubrepos=listsubrepos, badfn=badfn)
1585 listsubrepos=listsubrepos, badfn=badfn)
1579
1586
1580 def _filtersuspectsymlink(self, files):
1587 def _filtersuspectsymlink(self, files):
1581 if not files or self._repo.dirstate._checklink:
1588 if not files or self._repo.dirstate._checklink:
1582 return files
1589 return files
1583
1590
1584 # Symlink placeholders may get non-symlink-like contents
1591 # Symlink placeholders may get non-symlink-like contents
1585 # via user error or dereferencing by NFS or Samba servers,
1592 # via user error or dereferencing by NFS or Samba servers,
1586 # so we filter out any placeholders that don't look like a
1593 # so we filter out any placeholders that don't look like a
1587 # symlink
1594 # symlink
1588 sane = []
1595 sane = []
1589 for f in files:
1596 for f in files:
1590 if self.flags(f) == 'l':
1597 if self.flags(f) == 'l':
1591 d = self[f].data()
1598 d = self[f].data()
1592 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1599 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1593 self._repo.ui.debug('ignoring suspect symlink placeholder'
1600 self._repo.ui.debug('ignoring suspect symlink placeholder'
1594 ' "%s"\n' % f)
1601 ' "%s"\n' % f)
1595 continue
1602 continue
1596 sane.append(f)
1603 sane.append(f)
1597 return sane
1604 return sane
1598
1605
1599 def _checklookup(self, files):
1606 def _checklookup(self, files):
1600 # check for any possibly clean files
1607 # check for any possibly clean files
1601 if not files:
1608 if not files:
1602 return [], []
1609 return [], []
1603
1610
1604 modified = []
1611 modified = []
1605 fixup = []
1612 fixup = []
1606 pctx = self._parents[0]
1613 pctx = self._parents[0]
1607 # do a full compare of any files that might have changed
1614 # do a full compare of any files that might have changed
1608 for f in sorted(files):
1615 for f in sorted(files):
1609 if (f not in pctx or self.flags(f) != pctx.flags(f)
1616 if (f not in pctx or self.flags(f) != pctx.flags(f)
1610 or pctx[f].cmp(self[f])):
1617 or pctx[f].cmp(self[f])):
1611 modified.append(f)
1618 modified.append(f)
1612 else:
1619 else:
1613 fixup.append(f)
1620 fixup.append(f)
1614
1621
1615 # update dirstate for files that are actually clean
1622 # update dirstate for files that are actually clean
1616 if fixup:
1623 if fixup:
1617 try:
1624 try:
1618 # updating the dirstate is optional
1625 # updating the dirstate is optional
1619 # so we don't wait on the lock
1626 # so we don't wait on the lock
1620 # wlock can invalidate the dirstate, so cache normal _after_
1627 # wlock can invalidate the dirstate, so cache normal _after_
1621 # taking the lock
1628 # taking the lock
1622 with self._repo.wlock(False):
1629 with self._repo.wlock(False):
1623 normal = self._repo.dirstate.normal
1630 normal = self._repo.dirstate.normal
1624 for f in fixup:
1631 for f in fixup:
1625 normal(f)
1632 normal(f)
1626 # write changes out explicitly, because nesting
1633 # write changes out explicitly, because nesting
1627 # wlock at runtime may prevent 'wlock.release()'
1634 # wlock at runtime may prevent 'wlock.release()'
1628 # after this block from doing so for subsequent
1635 # after this block from doing so for subsequent
1629 # changing files
1636 # changing files
1630 self._repo.dirstate.write(self._repo.currenttransaction())
1637 self._repo.dirstate.write(self._repo.currenttransaction())
1631 except error.LockError:
1638 except error.LockError:
1632 pass
1639 pass
1633 return modified, fixup
1640 return modified, fixup
1634
1641
1635 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1642 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1636 unknown=False):
1643 unknown=False):
1637 '''Gets the status from the dirstate -- internal use only.'''
1644 '''Gets the status from the dirstate -- internal use only.'''
1638 listignored, listclean, listunknown = ignored, clean, unknown
1645 listignored, listclean, listunknown = ignored, clean, unknown
1639 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1646 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1640 subrepos = []
1647 subrepos = []
1641 if '.hgsub' in self:
1648 if '.hgsub' in self:
1642 subrepos = sorted(self.substate)
1649 subrepos = sorted(self.substate)
1643 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1650 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1644 listclean, listunknown)
1651 listclean, listunknown)
1645
1652
1646 # check for any possibly clean files
1653 # check for any possibly clean files
1647 if cmp:
1654 if cmp:
1648 modified2, fixup = self._checklookup(cmp)
1655 modified2, fixup = self._checklookup(cmp)
1649 s.modified.extend(modified2)
1656 s.modified.extend(modified2)
1650
1657
1651 # update dirstate for files that are actually clean
1658 # update dirstate for files that are actually clean
1652 if fixup and listclean:
1659 if fixup and listclean:
1653 s.clean.extend(fixup)
1660 s.clean.extend(fixup)
1654
1661
1655 if match.always():
1662 if match.always():
1656 # cache for performance
1663 # cache for performance
1657 if s.unknown or s.ignored or s.clean:
1664 if s.unknown or s.ignored or s.clean:
1658 # "_status" is cached with list*=False in the normal route
1665 # "_status" is cached with list*=False in the normal route
1659 self._status = scmutil.status(s.modified, s.added, s.removed,
1666 self._status = scmutil.status(s.modified, s.added, s.removed,
1660 s.deleted, [], [], [])
1667 s.deleted, [], [], [])
1661 else:
1668 else:
1662 self._status = s
1669 self._status = s
1663
1670
1664 return s
1671 return s
1665
1672
1666 @propertycache
1673 @propertycache
1667 def _manifest(self):
1674 def _manifest(self):
1668 """generate a manifest corresponding to the values in self._status
1675 """generate a manifest corresponding to the values in self._status
1669
1676
1670 This reuses the file nodeids from the parent, but we use special node
1677 This reuses the file nodeids from the parent, but we use special node
1671 identifiers for added and modified files. This is used by manifest
1678 identifiers for added and modified files. This is used by manifest
1672 merge to see that files are different and by update logic to avoid
1679 merge to see that files are different and by update logic to avoid
1673 deleting newly added files.
1680 deleting newly added files.
1674 """
1681 """
1675 return self._buildstatusmanifest(self._status)
1682 return self._buildstatusmanifest(self._status)
1676
1683
1677 def _buildstatusmanifest(self, status):
1684 def _buildstatusmanifest(self, status):
1678 """Builds a manifest that includes the given status results."""
1685 """Builds a manifest that includes the given status results."""
1679 parents = self.parents()
1686 parents = self.parents()
1680
1687
1681 man = parents[0].manifest().copy()
1688 man = parents[0].manifest().copy()
1682
1689
1683 ff = self._flagfunc
1690 ff = self._flagfunc
1684 for i, l in ((addednodeid, status.added),
1691 for i, l in ((addednodeid, status.added),
1685 (modifiednodeid, status.modified)):
1692 (modifiednodeid, status.modified)):
1686 for f in l:
1693 for f in l:
1687 man[f] = i
1694 man[f] = i
1688 try:
1695 try:
1689 man.setflag(f, ff(f))
1696 man.setflag(f, ff(f))
1690 except OSError:
1697 except OSError:
1691 pass
1698 pass
1692
1699
1693 for f in status.deleted + status.removed:
1700 for f in status.deleted + status.removed:
1694 if f in man:
1701 if f in man:
1695 del man[f]
1702 del man[f]
1696
1703
1697 return man
1704 return man
1698
1705
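# Sketch of the result (illustrative paths): files added or modified in the
# working directory carry sentinel nodeids rather than real filelog nodes,
# so comparisons against committed manifests always see them as different.
#
#     man['newfile.txt'] == addednodeid        # added in the working dir
#     man['changed.txt'] == modifiednodeid     # modified in the working dir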
1699 def _buildstatus(self, other, s, match, listignored, listclean,
1706 def _buildstatus(self, other, s, match, listignored, listclean,
1700 listunknown):
1707 listunknown):
1701 """build a status with respect to another context
1708 """build a status with respect to another context
1702
1709
1703 This includes logic for maintaining the fast path of status when
1710 This includes logic for maintaining the fast path of status when
1704 comparing the working directory against its parent: a new manifest is
1711 comparing the working directory against its parent: a new manifest is
1705 only built if self (the working directory) is being compared against
1712 only built if self (the working directory) is being compared against
1706 something other than its parent (repo['.']).
1713 something other than its parent (repo['.']).
1707 """
1714 """
1708 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1715 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1709 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1716 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1710 # might have accidentally ended up with the entire contents of the file
1717 # might have accidentally ended up with the entire contents of the file
1711 # they are supposed to be linking to.
1718 # they are supposed to be linking to.
1712 s.modified[:] = self._filtersuspectsymlink(s.modified)
1719 s.modified[:] = self._filtersuspectsymlink(s.modified)
1713 if other != self._repo['.']:
1720 if other != self._repo['.']:
1714 s = super(workingctx, self)._buildstatus(other, s, match,
1721 s = super(workingctx, self)._buildstatus(other, s, match,
1715 listignored, listclean,
1722 listignored, listclean,
1716 listunknown)
1723 listunknown)
1717 return s
1724 return s
1718
1725
1719 def _matchstatus(self, other, match):
1726 def _matchstatus(self, other, match):
1720 """override the match method with a filter for directory patterns
1727 """override the match method with a filter for directory patterns
1721
1728
1722 We use inheritance to customize the match.bad method only in cases of
1729 We use inheritance to customize the match.bad method only in cases of
1723 workingctx since it belongs only to the working directory when
1730 workingctx since it belongs only to the working directory when
1724 comparing against the parent changeset.
1731 comparing against the parent changeset.
1725
1732
1726 If we aren't comparing against the working directory's parent, then we
1733 If we aren't comparing against the working directory's parent, then we
1727 just use the default match object sent to us.
1734 just use the default match object sent to us.
1728 """
1735 """
1729 superself = super(workingctx, self)
1736 superself = super(workingctx, self)
1730 match = superself._matchstatus(other, match)
1737 match = superself._matchstatus(other, match)
1731 if other != self._repo['.']:
1738 if other != self._repo['.']:
1732 def bad(f, msg):
1739 def bad(f, msg):
1733 # 'f' may be a directory pattern from 'match.files()',
1740 # 'f' may be a directory pattern from 'match.files()',
1734 # so 'f not in ctx1' is not enough
1741 # so 'f not in ctx1' is not enough
1735 if f not in other and not other.hasdir(f):
1742 if f not in other and not other.hasdir(f):
1736 self._repo.ui.warn('%s: %s\n' %
1743 self._repo.ui.warn('%s: %s\n' %
1737 (self._repo.dirstate.pathto(f), msg))
1744 (self._repo.dirstate.pathto(f), msg))
1738 match.bad = bad
1745 match.bad = bad
1739 return match
1746 return match
1740
1747
1741 class committablefilectx(basefilectx):
1748 class committablefilectx(basefilectx):
1742 """A committablefilectx provides common functionality for a file context
1749 """A committablefilectx provides common functionality for a file context
1743 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1750 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1744 def __init__(self, repo, path, filelog=None, ctx=None):
1751 def __init__(self, repo, path, filelog=None, ctx=None):
1745 self._repo = repo
1752 self._repo = repo
1746 self._path = path
1753 self._path = path
1747 self._changeid = None
1754 self._changeid = None
1748 self._filerev = self._filenode = None
1755 self._filerev = self._filenode = None
1749
1756
1750 if filelog is not None:
1757 if filelog is not None:
1751 self._filelog = filelog
1758 self._filelog = filelog
1752 if ctx:
1759 if ctx:
1753 self._changectx = ctx
1760 self._changectx = ctx
1754
1761
1755 def __nonzero__(self):
1762 def __nonzero__(self):
1756 return True
1763 return True
1757
1764
1758 __bool__ = __nonzero__
1765 __bool__ = __nonzero__
1759
1766
1760 def linkrev(self):
1767 def linkrev(self):
1761 # linked to self._changectx no matter if file is modified or not
1768 # linked to self._changectx no matter if file is modified or not
1762 return self.rev()
1769 return self.rev()
1763
1770
1764 def parents(self):
1771 def parents(self):
1765 '''return parent filectxs, following copies if necessary'''
1772 '''return parent filectxs, following copies if necessary'''
1766 def filenode(ctx, path):
1773 def filenode(ctx, path):
1767 return ctx._manifest.get(path, nullid)
1774 return ctx._manifest.get(path, nullid)
1768
1775
1769 path = self._path
1776 path = self._path
1770 fl = self._filelog
1777 fl = self._filelog
1771 pcl = self._changectx._parents
1778 pcl = self._changectx._parents
1772 renamed = self.renamed()
1779 renamed = self.renamed()
1773
1780
1774 if renamed:
1781 if renamed:
1775 pl = [renamed + (None,)]
1782 pl = [renamed + (None,)]
1776 else:
1783 else:
1777 pl = [(path, filenode(pcl[0], path), fl)]
1784 pl = [(path, filenode(pcl[0], path), fl)]
1778
1785
1779 for pc in pcl[1:]:
1786 for pc in pcl[1:]:
1780 pl.append((path, filenode(pc, path), fl))
1787 pl.append((path, filenode(pc, path), fl))
1781
1788
1782 return [self._parentfilectx(p, fileid=n, filelog=l)
1789 return [self._parentfilectx(p, fileid=n, filelog=l)
1783 for p, n, l in pl if n != nullid]
1790 for p, n, l in pl if n != nullid]
1784
1791
1785 def children(self):
1792 def children(self):
1786 return []
1793 return []
1787
1794
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)
    def renamed(self):
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags)

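# A minimal usage sketch (not part of the original context.py): checking
# whether a file on disk differs from its state in the working directory's
# first parent. 'repo' and 'path' are assumed inputs; repo[None] and repo['.']
# are the usual working-directory and parent changectx lookups.
def _example_workingfile_changed(repo, path):
    wfctx = repo[None][path]   # workingfilectx for the on-disk file
    pfctx = repo['.'][path]    # filectx for the same path in the first parent
    # workingfilectx.cmp() inverts the comparison and reuses filectx.cmp(),
    # so this returns True when the contents differ
    return wfctx.cmp(pfctx)
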
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed

def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        if path not in cache:
            cache[path] = func(repo, memctx, path)
        return cache[path]

    return getfilectx

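# A minimal sketch of wrapping a filectxfn with the caching helper above.
# 'makedata' is a hypothetical callable mapping a path to its new content;
# only makecachingfilectxfn and memfilectx from this module are relied upon.
def _example_caching_filectxfn(makedata):
    def expensive(repo, memctx, path):
        # once wrapped, this runs at most once per path even if the commit
        # machinery requests the same file several times
        return memfilectx(repo, path, makedata(path), memctx=memctx)
    return makecachingfilectxfn(expensive)
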
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time, while the
    data of the related files is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revision identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    the requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but the call order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn returns None in recent
    Mercurial versions. Moved files are represented by marking the
    source file removed and the new file added with copy information
    (see memfilectx).

    user receives the committer name and defaults to the current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to the current date,
    extra is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if filectxfn is not callable, treat it as a mapping of paths to
        # file contexts and wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # memoizing increases performance for e.g. vcs convert scenarios.
            self._filectxfn = makecachingfilectxfn(filectxfn)

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

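# A minimal sketch of an in-memory commit with memctx, assuming a localrepo
# object 'repo'. The file name, content, message and user are placeholders;
# only the memctx/memfilectx interfaces described above are relied upon.
def _example_memctx_commit(repo):
    def getfilectx(repo, memctx, path):
        # provide content for every path listed in 'files'; returning None
        # here would mark the file as removed instead
        return memfilectx(repo, path, 'hello\n', memctx=memctx)
    ctx = memctx(repo, (repo['.'].node(), None), 'example in-memory commit',
                 ['hello.txt'], getfilectx, user='example user')
    return ctx.commit()   # hands the context to localrepo.commitctx()
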
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

    def data(self):
        return self._data
    def size(self):
        return len(self.data())
    def flags(self):
        return self._flags
    def renamed(self):
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data

class metadataonlyctx(committablectx):
    """Like memctx but reusing the manifest of a different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'originalctx' is the original revision whose manifest
    we're reusing, 'parents' is a sequence of two parent revision identifiers
    (pass None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    util.parsedate() and defaults to the current date, extra is a dictionary
    of metadata or is left empty.
    """
    def __new__(cls, repo, originalctx, *args, **kwargs):
        return super(metadataonlyctx, cls).__new__(cls, repo)

    def __init__(self, repo, originalctx, parents, text, user=None, date=None,
                 extra=None, editor=False):
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        parents = [(p or nullid) for p in parents]
        p1, p2 = self._parents = [changectx(self._repo, p) for p in parents]

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        return self._manifestnode

    @propertycache
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
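
# A minimal sketch of a metadata-only rewrite with metadataonlyctx, assuming a
# localrepo 'repo' and a revision 'rev' to rewrite. The old revision's manifest
# is reused unchanged; only the commit message is replaced in this example, and
# 'newtext' is an assumed input.
def _example_metadata_rewrite(repo, rev, newtext):
    old = repo[rev]
    new = metadataonlyctx(repo, old,
                          parents=(old.p1().node(), old.p2().node()),
                          text=newtext,
                          user=old.user(),
                          date=old.date(),
                          extra=old.extra())
    return new.commit()   # creates the rewritten changeset via commitctx()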