filectx: add isabsent method...
Siddharth Agarwal
r26978:9b9d4bcc default
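The change adds an isabsent() hook to basefilectx for merge code: it asks whether a filectx stands for a file that is absent from its changeset, which is what change/delete conflict detection needs. A minimal, hypothetical sketch of that usage pattern (the stub class and classify helper are illustrative only, not part of this commit):

    # Illustration only: basefilectx.isabsent() returns False; a filectx
    # subclass that stands in for a file missing on one side of a merge
    # would override it to return True.
    class stubfctx(object):
        def __init__(self, absent=False):
            self._absent = absent

        def isabsent(self):
            return self._absent

    def classify(local, other):
        # One side modified the file while the other side has no such file:
        # that is a change/delete conflict rather than a normal merge.
        if local.isabsent() != other.isabsent():
            return 'change/delete conflict'
        return 'regular merge'

    print(classify(stubfctx(), stubfctx(absent=True)))  # change/delete conflict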
@@ -1,1937 +1,1944 @@
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import re

from node import nullid, nullrev, wdirid, short, hex, bin
from i18n import _
import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
import match as matchmod
import os, errno, stat
import obsolete as obsmod
import repoview
import fileset
import revlog

propertycache = util.propertycache

# Phony node value to stand-in for new files in some uses of
# manifests. Manifests support 21-byte hashes for nodes which are
# dirty in the working copy.
_newnode = '!' * 21

nonascii = re.compile(r'[^\x21-\x7f]').search

class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __str__(self):
        return short(self.node())

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _manifestmatches(self, match, s):
        """generate a new manifest filtered by the match argument

        This method is for internal use only and mainly exists to provide an
        object oriented way for other contexts to customize the manifest
        generation.
        """
        return self.manifest().matches(match)

    def _matchstatus(self, other, match):
        """return match.always if match is none

        This internal method provides a way for child objects to override the
        match operator.
        """
        return match or matchmod.always(self._repo.root, self._repo.getcwd())

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        if self.rev() is not None and self.rev() < other.rev():
            self.manifest()
        mf1 = other._manifestmatches(match, s)
        mf2 = self._manifestmatches(match, s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif node2 != _newnode:
                # The file was not a new file in mf2, so an entry
                # from diff is really a difference.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                # node2 was newnode, but the working file doesn't
                # match the one in mf1.
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1]
            ignored = [fn for fn in ignored if fn not in mf1]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def repo(self):
        return self._repo
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'unstable')

    def bumped(self):
        """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'bumped')

    def divergent(self):
        """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'divergent')

    def troubled(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.unstable() or self.bumped() or self.divergent()

    def troubles(self):
        """return the list of troubles affecting this changesets.

        Troubles are returned as strings. possible values are:
        - unstable,
        - bumped,
        - divergent.
        """
        troubles = []
        if self.unstable():
            troubles.append('unstable')
        if self.bumped():
            troubles.append('bumped')
        if self.divergent():
            troubles.append('divergent')
        return troubles

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        if len(self._parents) == 2:
            return self._parents[1]
        return changectx(self._repo, -1)

    def _fileinfo(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=[], include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def dirty(self, missing=False, merge=True, branch=True):
        return False

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                rev2 = ctx2.subrev(subpath)
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self._repo.ui.status(_("skipping missing "
                                           "subrepository: %s\n") % subpath)

        for l in r:
            l.sort()

        return r


def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None, extra=None):
    def getfilectx(repo, memctx, path):
        data, mode, copied = store.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data, islink=islink, isexec=isexec,
                          copied=copied, memctx=memctx)
    if extra is None:
        extra = {}
    if branch:
        extra['branch'] = encoding.fromlocal(branch)
    ctx = memctx(repo, parents, text, files, getfilectx, user,
                 date, extra, editor)
    return ctx

class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if changeid == '.' or changeid == repo.dirstate.p1():
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            try:
                r = int(changeid)
                if str(r) != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if changeid in repo.unfiltered().dirstate.parents():
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            if repo.filtername.startswith('visible'):
                msg = _("hidden revision '%s'") % changeid
                hint = _('use --hidden to access hidden revisions')
                raise error.FilteredRepoLookupError(msg, hint=hint)
            msg = _("filtered revision '%s' (not in '%s' subset)")
            msg %= (changeid, repo.filtername)
            raise error.FilteredRepoLookupError(msg)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)

    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __nonzero__(self):
        return self._rev != nullrev

    @propertycache
    def _changeset(self):
        return self._repo.changelog.read(self.rev())

    @propertycache
    def _manifest(self):
        return self._repo.manifest.read(self._changeset[0])

    @propertycache
    def _manifestdelta(self):
        return self._repo.manifest.readdelta(self._changeset[0])

    @propertycache
    def _parents(self):
        p = self._repo.changelog.parentrevs(self._rev)
        if p[1] == nullrev:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def changeset(self):
        return self._changeset
    def manifestnode(self):
        return self._changeset[0]

    def user(self):
        return self._changeset[1]
    def date(self):
        return self._changeset[2]
    def files(self):
        return self._changeset[3]
    def description(self):
        return self._changeset[4]
    def branch(self):
        return encoding.tolocal(self._changeset[5].get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset[5]
    def extra(self):
        return self._changeset[5]
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)

    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        return self.walk(match)

class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory."""
    def __new__(cls, repo, path, *args, **kwargs):
        return super(basefilectx, cls).__new__(cls)

    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if '_changeid' in self.__dict__:
            return self._changeid
        elif '_changectx' in self.__dict__:
            return self._changectx.rev()
        elif '_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._path, self._filelog,
                                       self._filenode, self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if '_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    def flags(self):
        return self._changectx.flags(self._path)
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def repo(self):
        return self._repo

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

+    def isabsent(self):
+        """whether this filectx represents a file not in self._changectx
+
+        This is mainly for merge code to detect change/delete conflicts. This is
+        expected to be True for all subclasses of basectx."""
+        return False
+
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if (fctx._filerev is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True

    def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :repo: a localrepository object (used to access changelog and manifest)
        :path: the file path
        :fnode: the nodeid of the file revision
        :filelog: the filelog of this path
        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        ma = repo.manifest
        # fetch the linkrev
        fr = filelog.rev(fnode)
        lkr = filelog.linkrev(fr)
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == ma.readfast(ac[0]).get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            return self.linkrev()
        return self._adjustlinkrev(self._path, self._filelog, self._filenode,
                                   self.rev(), inclusive=True)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, linenumber=None, diffopts=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.
        This returns tuples of ((ctx, linenumber), line) for each line,
        if "linenumber" parameter is NOT "None".
        In such tuples, linenumber means one at the first appearance
        in the managed file.
        To reduce annotation cost,
        this returns fixed value(False is used) as linenumber,
        if "linenumber" parameter is "False".'''

        if linenumber is None:
            def decorate(text, rev):
                return ([rev] * len(text.splitlines()), text)
        elif linenumber:
            def decorate(text, rev):
                size = len(text.splitlines())
                return ([(rev, i) for i in xrange(1, size + 1)], text)
        else:
            def decorate(text, rev):
                return ([(rev, False)] * len(text.splitlines()), text)

        def pair(parent, child):
            blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
                                     refine=True)
            for (a1, a2, b1, b2), t in blocks:
                # Changed blocks ('!') or blocks made only of blank lines ('~')
                # belong to the child.
                if t == '=':
                    child[0][b1:b2] = parent[0][a1:a2]
            return child

        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self
        introrev = self.introrev()
        if self.rev() != introrev:
            base = self.filectx(self.filenode(), changeid=introrev)
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if introrev is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors([p.rev() for p in base.parents()],
946 inclusive=True)
953 inclusive=True)
947 else:
954 else:
948 ac = cl.ancestors([introrev], inclusive=True)
955 ac = cl.ancestors([introrev], inclusive=True)
949 base._ancestrycontext = ac
956 base._ancestrycontext = ac
950
957
951 # This algorithm would prefer to be recursive, but Python is a
958 # This algorithm would prefer to be recursive, but Python is a
952 # bit recursion-hostile. Instead we do an iterative
959 # bit recursion-hostile. Instead we do an iterative
953 # depth-first search.
960 # depth-first search.
954
961
955 visit = [base]
962 visit = [base]
956 hist = {}
963 hist = {}
957 pcache = {}
964 pcache = {}
958 needed = {base: 1}
965 needed = {base: 1}
959 while visit:
966 while visit:
960 f = visit[-1]
967 f = visit[-1]
961 pcached = f in pcache
968 pcached = f in pcache
962 if not pcached:
969 if not pcached:
963 pcache[f] = parents(f)
970 pcache[f] = parents(f)
964
971
965 ready = True
972 ready = True
966 pl = pcache[f]
973 pl = pcache[f]
967 for p in pl:
974 for p in pl:
968 if p not in hist:
975 if p not in hist:
969 ready = False
976 ready = False
970 visit.append(p)
977 visit.append(p)
971 if not pcached:
978 if not pcached:
972 needed[p] = needed.get(p, 0) + 1
979 needed[p] = needed.get(p, 0) + 1
973 if ready:
980 if ready:
974 visit.pop()
981 visit.pop()
975 reusable = f in hist
982 reusable = f in hist
976 if reusable:
983 if reusable:
977 curr = hist[f]
984 curr = hist[f]
978 else:
985 else:
979 curr = decorate(f.data(), f)
986 curr = decorate(f.data(), f)
980 for p in pl:
987 for p in pl:
981 if not reusable:
988 if not reusable:
982 curr = pair(hist[p], curr)
989 curr = pair(hist[p], curr)
983 if needed[p] == 1:
990 if needed[p] == 1:
984 del hist[p]
991 del hist[p]
985 del needed[p]
992 del needed[p]
986 else:
993 else:
987 needed[p] -= 1
994 needed[p] -= 1
988
995
989 hist[f] = curr
996 hist[f] = curr
990 pcache[f] = []
997 pcache[f] = []
991
998
992 return zip(hist[base][0], hist[base][1].splitlines(True))
999 return zip(hist[base][0], hist[base][1].splitlines(True))
993
1000
994 def ancestors(self, followfirst=False):
1001 def ancestors(self, followfirst=False):
995 visit = {}
1002 visit = {}
996 c = self
1003 c = self
997 if followfirst:
1004 if followfirst:
998 cut = 1
1005 cut = 1
999 else:
1006 else:
1000 cut = None
1007 cut = None
1001
1008
1002 while True:
1009 while True:
1003 for parent in c.parents()[:cut]:
1010 for parent in c.parents()[:cut]:
1004 visit[(parent.linkrev(), parent.filenode())] = parent
1011 visit[(parent.linkrev(), parent.filenode())] = parent
1005 if not visit:
1012 if not visit:
1006 break
1013 break
1007 c = visit.pop(max(visit))
1014 c = visit.pop(max(visit))
1008 yield c
1015 yield c
1009
1016
1010 class filectx(basefilectx):
1017 class filectx(basefilectx):
1011 """A filecontext object makes access to data related to a particular
1018 """A filecontext object makes access to data related to a particular
1012 filerevision convenient."""
1019 filerevision convenient."""
1013 def __init__(self, repo, path, changeid=None, fileid=None,
1020 def __init__(self, repo, path, changeid=None, fileid=None,
1014 filelog=None, changectx=None):
1021 filelog=None, changectx=None):
1015 """changeid can be a changeset revision, node, or tag.
1022 """changeid can be a changeset revision, node, or tag.
1016 fileid can be a file revision or node."""
1023 fileid can be a file revision or node."""
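# Typical ways to obtain a filectx (a sketch; 'repo' and the path
# 'foo' are illustrative):
#
#   fctx = repo['tip']['foo']                    # via a changectx
#   fctx = filectx(repo, 'foo', changeid='tip')  # direct construction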
1017 self._repo = repo
1024 self._repo = repo
1018 self._path = path
1025 self._path = path
1019
1026
1020 assert (changeid is not None
1027 assert (changeid is not None
1021 or fileid is not None
1028 or fileid is not None
1022 or changectx is not None), \
1029 or changectx is not None), \
1023 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1030 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1024 % (changeid, fileid, changectx))
1031 % (changeid, fileid, changectx))
1025
1032
1026 if filelog is not None:
1033 if filelog is not None:
1027 self._filelog = filelog
1034 self._filelog = filelog
1028
1035
1029 if changeid is not None:
1036 if changeid is not None:
1030 self._changeid = changeid
1037 self._changeid = changeid
1031 if changectx is not None:
1038 if changectx is not None:
1032 self._changectx = changectx
1039 self._changectx = changectx
1033 if fileid is not None:
1040 if fileid is not None:
1034 self._fileid = fileid
1041 self._fileid = fileid
1035
1042
1036 @propertycache
1043 @propertycache
1037 def _changectx(self):
1044 def _changectx(self):
1038 try:
1045 try:
1039 return changectx(self._repo, self._changeid)
1046 return changectx(self._repo, self._changeid)
1040 except error.FilteredRepoLookupError:
1047 except error.FilteredRepoLookupError:
# Linkrev may point to any revision in the repository. When the
# repository is filtered, this can lead to `filectx` trying to build
# a `changectx` for a filtered revision. In that case we fall back to
# creating the `changectx` on the unfiltered version of the repository.
# This fallback should not be an issue because `changectx` objects
# obtained from `filectx` are not used in complex operations that
# care about filtering.
#
# This fallback is a cheap and dirty fix that prevents several
# crashes. It does not ensure the behavior is correct. However the
# behavior was not correct before filtering either, and "incorrect
# behavior" is seen as better than "crash".
#
# Linkrevs have several serious troubles with filtering that are
# complicated to solve. Proper handling of the issue here should be
# considered when solving the linkrev issues is on the table.
1057 return changectx(self._repo.unfiltered(), self._changeid)
1064 return changectx(self._repo.unfiltered(), self._changeid)
1058
1065
1059 def filectx(self, fileid, changeid=None):
1066 def filectx(self, fileid, changeid=None):
1060 '''opens an arbitrary revision of the file without
1067 '''opens an arbitrary revision of the file without
1061 opening a new filelog'''
1068 opening a new filelog'''
1062 return filectx(self._repo, self._path, fileid=fileid,
1069 return filectx(self._repo, self._path, fileid=fileid,
1063 filelog=self._filelog, changeid=changeid)
1070 filelog=self._filelog, changeid=changeid)
1064
1071
1065 def data(self):
1072 def data(self):
1066 try:
1073 try:
1067 return self._filelog.read(self._filenode)
1074 return self._filelog.read(self._filenode)
1068 except error.CensoredNodeError:
1075 except error.CensoredNodeError:
1069 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1076 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1070 return ""
1077 return ""
1071 raise error.Abort(_("censored node: %s") % short(self._filenode),
1078 raise error.Abort(_("censored node: %s") % short(self._filenode),
1072 hint=_("set censor.policy to ignore errors"))
1079 hint=_("set censor.policy to ignore errors"))
1073
1080
1074 def size(self):
1081 def size(self):
1075 return self._filelog.size(self._filerev)
1082 return self._filelog.size(self._filerev)
1076
1083
1077 def renamed(self):
1084 def renamed(self):
1078 """check if file was actually renamed in this changeset revision
1085 """check if file was actually renamed in this changeset revision
1079
1086
1080 If rename logged in file revision, we report copy for changeset only
1087 If rename logged in file revision, we report copy for changeset only
1081 if file revisions linkrev points back to the changeset in question
1088 if file revisions linkrev points back to the changeset in question
1082 or both changeset parents contain different file revisions.
1089 or both changeset parents contain different file revisions.
1083 """
1090 """
1084
1091
1085 renamed = self._filelog.renamed(self._filenode)
1092 renamed = self._filelog.renamed(self._filenode)
1086 if not renamed:
1093 if not renamed:
1087 return renamed
1094 return renamed
1088
1095
1089 if self.rev() == self.linkrev():
1096 if self.rev() == self.linkrev():
1090 return renamed
1097 return renamed
1091
1098
1092 name = self.path()
1099 name = self.path()
1093 fnode = self._filenode
1100 fnode = self._filenode
1094 for p in self._changectx.parents():
1101 for p in self._changectx.parents():
1095 try:
1102 try:
1096 if fnode == p.filenode(name):
1103 if fnode == p.filenode(name):
1097 return None
1104 return None
1098 except error.LookupError:
1105 except error.LookupError:
1099 pass
1106 pass
1100 return renamed
1107 return renamed
1101
1108
1102 def children(self):
1109 def children(self):
1103 # hard for renames
1110 # hard for renames
1104 c = self._filelog.children(self._filenode)
1111 c = self._filelog.children(self._filenode)
1105 return [filectx(self._repo, self._path, fileid=x,
1112 return [filectx(self._repo, self._path, fileid=x,
1106 filelog=self._filelog) for x in c]
1113 filelog=self._filelog) for x in c]
1107
1114
1108 class committablectx(basectx):
1115 class committablectx(basectx):
1109 """A committablectx object provides common functionality for a context that
1116 """A committablectx object provides common functionality for a context that
1110 wants the ability to commit, e.g. workingctx or memctx."""
1117 wants the ability to commit, e.g. workingctx or memctx."""
1111 def __init__(self, repo, text="", user=None, date=None, extra=None,
1118 def __init__(self, repo, text="", user=None, date=None, extra=None,
1112 changes=None):
1119 changes=None):
1113 self._repo = repo
1120 self._repo = repo
1114 self._rev = None
1121 self._rev = None
1115 self._node = None
1122 self._node = None
1116 self._text = text
1123 self._text = text
1117 if date:
1124 if date:
1118 self._date = util.parsedate(date)
1125 self._date = util.parsedate(date)
1119 if user:
1126 if user:
1120 self._user = user
1127 self._user = user
1121 if changes:
1128 if changes:
1122 self._status = changes
1129 self._status = changes
1123
1130
1124 self._extra = {}
1131 self._extra = {}
1125 if extra:
1132 if extra:
1126 self._extra = extra.copy()
1133 self._extra = extra.copy()
1127 if 'branch' not in self._extra:
1134 if 'branch' not in self._extra:
1128 try:
1135 try:
1129 branch = encoding.fromlocal(self._repo.dirstate.branch())
1136 branch = encoding.fromlocal(self._repo.dirstate.branch())
1130 except UnicodeDecodeError:
1137 except UnicodeDecodeError:
1131 raise error.Abort(_('branch name not in UTF-8!'))
1138 raise error.Abort(_('branch name not in UTF-8!'))
1132 self._extra['branch'] = branch
1139 self._extra['branch'] = branch
1133 if self._extra['branch'] == '':
1140 if self._extra['branch'] == '':
1134 self._extra['branch'] = 'default'
1141 self._extra['branch'] = 'default'
1135
1142
1136 def __str__(self):
1143 def __str__(self):
1137 return str(self._parents[0]) + "+"
1144 return str(self._parents[0]) + "+"
1138
1145
1139 def __nonzero__(self):
1146 def __nonzero__(self):
1140 return True
1147 return True
1141
1148
1142 def _buildflagfunc(self):
1149 def _buildflagfunc(self):
1143 # Create a fallback function for getting file flags when the
1150 # Create a fallback function for getting file flags when the
1144 # filesystem doesn't support them
1151 # filesystem doesn't support them
1145
1152
1146 copiesget = self._repo.dirstate.copies().get
1153 copiesget = self._repo.dirstate.copies().get
1147
1154
1148 if len(self._parents) < 2:
1155 if len(self._parents) < 2:
1149 # when we have one parent, it's easy: copy from parent
1156 # when we have one parent, it's easy: copy from parent
1150 man = self._parents[0].manifest()
1157 man = self._parents[0].manifest()
1151 def func(f):
1158 def func(f):
1152 f = copiesget(f, f)
1159 f = copiesget(f, f)
1153 return man.flags(f)
1160 return man.flags(f)
1154 else:
1161 else:
1155 # merges are tricky: we try to reconstruct the unstored
1162 # merges are tricky: we try to reconstruct the unstored
1156 # result from the merge (issue1802)
1163 # result from the merge (issue1802)
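# Resolution sketch for a file f with flags fl1 (p1), fl2 (p2) and
# fla (ancestor), as implemented below: if both parents agree, that
# flag wins; if one parent matches the ancestor, the other parent is
# assumed to carry the change; any other combination punts to ''.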
1157 p1, p2 = self._parents
1164 p1, p2 = self._parents
1158 pa = p1.ancestor(p2)
1165 pa = p1.ancestor(p2)
1159 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1166 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1160
1167
1161 def func(f):
1168 def func(f):
1162 f = copiesget(f, f) # may be wrong for merges with copies
1169 f = copiesget(f, f) # may be wrong for merges with copies
1163 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1170 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1164 if fl1 == fl2:
1171 if fl1 == fl2:
1165 return fl1
1172 return fl1
1166 if fl1 == fla:
1173 if fl1 == fla:
1167 return fl2
1174 return fl2
1168 if fl2 == fla:
1175 if fl2 == fla:
1169 return fl1
1176 return fl1
1170 return '' # punt for conflicts
1177 return '' # punt for conflicts
1171
1178
1172 return func
1179 return func
1173
1180
1174 @propertycache
1181 @propertycache
1175 def _flagfunc(self):
1182 def _flagfunc(self):
1176 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1183 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1177
1184
1178 @propertycache
1185 @propertycache
1179 def _manifest(self):
1186 def _manifest(self):
1180 """generate a manifest corresponding to the values in self._status
1187 """generate a manifest corresponding to the values in self._status
1181
1188
1182 This reuse the file nodeid from parent, but we append an extra letter
1189 This reuse the file nodeid from parent, but we append an extra letter
1183 when modified. Modified files get an extra 'm' while added files get
1190 when modified. Modified files get an extra 'm' while added files get
1184 an extra 'a'. This is used by manifests merge to see that files
1191 an extra 'a'. This is used by manifests merge to see that files
1185 are different and by update logic to avoid deleting newly added files.
1192 are different and by update logic to avoid deleting newly added files.
1186 """
1193 """
1187
1194
1188 man1 = self._parents[0].manifest()
1195 man1 = self._parents[0].manifest()
1189 man = man1.copy()
1196 man = man1.copy()
1190 if len(self._parents) > 1:
1197 if len(self._parents) > 1:
1191 man2 = self.p2().manifest()
1198 man2 = self.p2().manifest()
1192 def getman(f):
1199 def getman(f):
1193 if f in man1:
1200 if f in man1:
1194 return man1
1201 return man1
1195 return man2
1202 return man2
1196 else:
1203 else:
1197 getman = lambda f: man1
1204 getman = lambda f: man1
1198
1205
1199 copied = self._repo.dirstate.copies()
1206 copied = self._repo.dirstate.copies()
1200 ff = self._flagfunc
1207 ff = self._flagfunc
1201 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1208 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1202 for f in l:
1209 for f in l:
1203 orig = copied.get(f, f)
1210 orig = copied.get(f, f)
1204 man[f] = getman(orig).get(orig, nullid) + i
1211 man[f] = getman(orig).get(orig, nullid) + i
1205 try:
1212 try:
1206 man.setflag(f, ff(f))
1213 man.setflag(f, ff(f))
1207 except OSError:
1214 except OSError:
1208 pass
1215 pass
1209
1216
1210 for f in self._status.deleted + self._status.removed:
1217 for f in self._status.deleted + self._status.removed:
1211 if f in man:
1218 if f in man:
1212 del man[f]
1219 del man[f]
1213
1220
1214 return man
1221 return man
1215
1222
1216 @propertycache
1223 @propertycache
1217 def _status(self):
1224 def _status(self):
1218 return self._repo.status()
1225 return self._repo.status()
1219
1226
1220 @propertycache
1227 @propertycache
1221 def _user(self):
1228 def _user(self):
1222 return self._repo.ui.username()
1229 return self._repo.ui.username()
1223
1230
1224 @propertycache
1231 @propertycache
1225 def _date(self):
1232 def _date(self):
1226 return util.makedate()
1233 return util.makedate()
1227
1234
1228 def subrev(self, subpath):
1235 def subrev(self, subpath):
1229 return None
1236 return None
1230
1237
1231 def manifestnode(self):
1238 def manifestnode(self):
1232 return None
1239 return None
1233 def user(self):
1240 def user(self):
1234 return self._user or self._repo.ui.username()
1241 return self._user or self._repo.ui.username()
1235 def date(self):
1242 def date(self):
1236 return self._date
1243 return self._date
1237 def description(self):
1244 def description(self):
1238 return self._text
1245 return self._text
1239 def files(self):
1246 def files(self):
1240 return sorted(self._status.modified + self._status.added +
1247 return sorted(self._status.modified + self._status.added +
1241 self._status.removed)
1248 self._status.removed)
1242
1249
1243 def modified(self):
1250 def modified(self):
1244 return self._status.modified
1251 return self._status.modified
1245 def added(self):
1252 def added(self):
1246 return self._status.added
1253 return self._status.added
1247 def removed(self):
1254 def removed(self):
1248 return self._status.removed
1255 return self._status.removed
1249 def deleted(self):
1256 def deleted(self):
1250 return self._status.deleted
1257 return self._status.deleted
1251 def branch(self):
1258 def branch(self):
1252 return encoding.tolocal(self._extra['branch'])
1259 return encoding.tolocal(self._extra['branch'])
1253 def closesbranch(self):
1260 def closesbranch(self):
1254 return 'close' in self._extra
1261 return 'close' in self._extra
1255 def extra(self):
1262 def extra(self):
1256 return self._extra
1263 return self._extra
1257
1264
1258 def tags(self):
1265 def tags(self):
1259 return []
1266 return []
1260
1267
1261 def bookmarks(self):
1268 def bookmarks(self):
1262 b = []
1269 b = []
1263 for p in self.parents():
1270 for p in self.parents():
1264 b.extend(p.bookmarks())
1271 b.extend(p.bookmarks())
1265 return b
1272 return b
1266
1273
1267 def phase(self):
1274 def phase(self):
1268 phase = phases.draft # default phase to draft
1275 phase = phases.draft # default phase to draft
1269 for p in self.parents():
1276 for p in self.parents():
1270 phase = max(phase, p.phase())
1277 phase = max(phase, p.phase())
1271 return phase
1278 return phase
1272
1279
1273 def hidden(self):
1280 def hidden(self):
1274 return False
1281 return False
1275
1282
1276 def children(self):
1283 def children(self):
1277 return []
1284 return []
1278
1285
1279 def flags(self, path):
1286 def flags(self, path):
1280 if '_manifest' in self.__dict__:
1287 if '_manifest' in self.__dict__:
1281 try:
1288 try:
1282 return self._manifest.flags(path)
1289 return self._manifest.flags(path)
1283 except KeyError:
1290 except KeyError:
1284 return ''
1291 return ''
1285
1292
1286 try:
1293 try:
1287 return self._flagfunc(path)
1294 return self._flagfunc(path)
1288 except OSError:
1295 except OSError:
1289 return ''
1296 return ''
1290
1297
1291 def ancestor(self, c2):
1298 def ancestor(self, c2):
1292 """return the "best" ancestor context of self and c2"""
1299 """return the "best" ancestor context of self and c2"""
1293 return self._parents[0].ancestor(c2) # punt on two parents for now
1300 return self._parents[0].ancestor(c2) # punt on two parents for now
1294
1301
1295 def walk(self, match):
1302 def walk(self, match):
1296 '''Generates matching file names.'''
1303 '''Generates matching file names.'''
1297 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1304 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1298 True, False))
1305 True, False))
1299
1306
1300 def matches(self, match):
1307 def matches(self, match):
1301 return sorted(self._repo.dirstate.matches(match))
1308 return sorted(self._repo.dirstate.matches(match))
1302
1309
1303 def ancestors(self):
1310 def ancestors(self):
1304 for p in self._parents:
1311 for p in self._parents:
1305 yield p
1312 yield p
1306 for a in self._repo.changelog.ancestors(
1313 for a in self._repo.changelog.ancestors(
1307 [p.rev() for p in self._parents]):
1314 [p.rev() for p in self._parents]):
1308 yield changectx(self._repo, a)
1315 yield changectx(self._repo, a)
1309
1316
1310 def markcommitted(self, node):
1317 def markcommitted(self, node):
1311 """Perform post-commit cleanup necessary after committing this ctx
1318 """Perform post-commit cleanup necessary after committing this ctx
1312
1319
1313 Specifically, this updates backing stores this working context
1320 Specifically, this updates backing stores this working context
1314 wraps to reflect the fact that the changes reflected by this
1321 wraps to reflect the fact that the changes reflected by this
1315 workingctx have been committed. For example, it marks
1322 workingctx have been committed. For example, it marks
1316 modified and added files as normal in the dirstate.
1323 modified and added files as normal in the dirstate.
1317
1324
1318 """
1325 """
1319
1326
1320 self._repo.dirstate.beginparentchange()
1327 self._repo.dirstate.beginparentchange()
1321 for f in self.modified() + self.added():
1328 for f in self.modified() + self.added():
1322 self._repo.dirstate.normal(f)
1329 self._repo.dirstate.normal(f)
1323 for f in self.removed():
1330 for f in self.removed():
1324 self._repo.dirstate.drop(f)
1331 self._repo.dirstate.drop(f)
1325 self._repo.dirstate.setparents(node)
1332 self._repo.dirstate.setparents(node)
1326 self._repo.dirstate.endparentchange()
1333 self._repo.dirstate.endparentchange()
1327
1334
1328 # write changes out explicitly, because nesting wlock at
1335 # write changes out explicitly, because nesting wlock at
1329 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1336 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1330 # from immediately doing so for subsequent changing files
1337 # from immediately doing so for subsequent changing files
1331 self._repo.dirstate.write(self._repo.currenttransaction())
1338 self._repo.dirstate.write(self._repo.currenttransaction())
1332
1339
1333 class workingctx(committablectx):
1340 class workingctx(committablectx):
1334 """A workingctx object makes access to data related to
1341 """A workingctx object makes access to data related to
1335 the current working directory convenient.
1342 the current working directory convenient.
1336 date - any valid date string or (unixtime, offset), or None.
1343 date - any valid date string or (unixtime, offset), or None.
1337 user - username string, or None.
1344 user - username string, or None.
1338 extra - a dictionary of extra values, or None.
1345 extra - a dictionary of extra values, or None.
1339 changes - a list of file lists as returned by localrepo.status()
1346 changes - a list of file lists as returned by localrepo.status()
1340 or None to use the repository status.
1347 or None to use the repository status.
1341 """
1348 """
1342 def __init__(self, repo, text="", user=None, date=None, extra=None,
1349 def __init__(self, repo, text="", user=None, date=None, extra=None,
1343 changes=None):
1350 changes=None):
1344 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1351 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1345
1352
1346 def __iter__(self):
1353 def __iter__(self):
1347 d = self._repo.dirstate
1354 d = self._repo.dirstate
1348 for f in d:
1355 for f in d:
1349 if d[f] != 'r':
1356 if d[f] != 'r':
1350 yield f
1357 yield f
1351
1358
1352 def __contains__(self, key):
1359 def __contains__(self, key):
1353 return self._repo.dirstate[key] not in "?r"
1360 return self._repo.dirstate[key] not in "?r"
1354
1361
1355 def hex(self):
1362 def hex(self):
1356 return hex(wdirid)
1363 return hex(wdirid)
1357
1364
1358 @propertycache
1365 @propertycache
1359 def _parents(self):
1366 def _parents(self):
1360 p = self._repo.dirstate.parents()
1367 p = self._repo.dirstate.parents()
1361 if p[1] == nullid:
1368 if p[1] == nullid:
1362 p = p[:-1]
1369 p = p[:-1]
1363 return [changectx(self._repo, x) for x in p]
1370 return [changectx(self._repo, x) for x in p]
1364
1371
1365 def filectx(self, path, filelog=None):
1372 def filectx(self, path, filelog=None):
1366 """get a file context from the working directory"""
1373 """get a file context from the working directory"""
1367 return workingfilectx(self._repo, path, workingctx=self,
1374 return workingfilectx(self._repo, path, workingctx=self,
1368 filelog=filelog)
1375 filelog=filelog)
1369
1376
1370 def dirty(self, missing=False, merge=True, branch=True):
1377 def dirty(self, missing=False, merge=True, branch=True):
1371 "check whether a working directory is modified"
1378 "check whether a working directory is modified"
1372 # check subrepos first
1379 # check subrepos first
1373 for s in sorted(self.substate):
1380 for s in sorted(self.substate):
1374 if self.sub(s).dirty():
1381 if self.sub(s).dirty():
1375 return True
1382 return True
1376 # check current working dir
1383 # check current working dir
1377 return ((merge and self.p2()) or
1384 return ((merge and self.p2()) or
1378 (branch and self.branch() != self.p1().branch()) or
1385 (branch and self.branch() != self.p1().branch()) or
1379 self.modified() or self.added() or self.removed() or
1386 self.modified() or self.added() or self.removed() or
1380 (missing and self.deleted()))
1387 (missing and self.deleted()))
1381
1388
1382 def add(self, list, prefix=""):
1389 def add(self, list, prefix=""):
1383 join = lambda f: os.path.join(prefix, f)
1390 join = lambda f: os.path.join(prefix, f)
1384 wlock = self._repo.wlock()
1391 wlock = self._repo.wlock()
1385 ui, ds = self._repo.ui, self._repo.dirstate
1392 ui, ds = self._repo.ui, self._repo.dirstate
1386 try:
1393 try:
1387 rejected = []
1394 rejected = []
1388 lstat = self._repo.wvfs.lstat
1395 lstat = self._repo.wvfs.lstat
1389 for f in list:
1396 for f in list:
1390 scmutil.checkportable(ui, join(f))
1397 scmutil.checkportable(ui, join(f))
1391 try:
1398 try:
1392 st = lstat(f)
1399 st = lstat(f)
1393 except OSError:
1400 except OSError:
1394 ui.warn(_("%s does not exist!\n") % join(f))
1401 ui.warn(_("%s does not exist!\n") % join(f))
1395 rejected.append(f)
1402 rejected.append(f)
1396 continue
1403 continue
1397 if st.st_size > 10000000:
1404 if st.st_size > 10000000:
1398 ui.warn(_("%s: up to %d MB of RAM may be required "
1405 ui.warn(_("%s: up to %d MB of RAM may be required "
1399 "to manage this file\n"
1406 "to manage this file\n"
1400 "(use 'hg revert %s' to cancel the "
1407 "(use 'hg revert %s' to cancel the "
1401 "pending addition)\n")
1408 "pending addition)\n")
1402 % (f, 3 * st.st_size // 1000000, join(f)))
1409 % (f, 3 * st.st_size // 1000000, join(f)))
1403 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1410 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1404 ui.warn(_("%s not added: only files and symlinks "
1411 ui.warn(_("%s not added: only files and symlinks "
1405 "supported currently\n") % join(f))
1412 "supported currently\n") % join(f))
1406 rejected.append(f)
1413 rejected.append(f)
1407 elif ds[f] in 'amn':
1414 elif ds[f] in 'amn':
1408 ui.warn(_("%s already tracked!\n") % join(f))
1415 ui.warn(_("%s already tracked!\n") % join(f))
1409 elif ds[f] == 'r':
1416 elif ds[f] == 'r':
1410 ds.normallookup(f)
1417 ds.normallookup(f)
1411 else:
1418 else:
1412 ds.add(f)
1419 ds.add(f)
1413 return rejected
1420 return rejected
1414 finally:
1421 finally:
1415 wlock.release()
1422 wlock.release()
1416
1423
1417 def forget(self, files, prefix=""):
1424 def forget(self, files, prefix=""):
1418 join = lambda f: os.path.join(prefix, f)
1425 join = lambda f: os.path.join(prefix, f)
1419 wlock = self._repo.wlock()
1426 wlock = self._repo.wlock()
1420 try:
1427 try:
1421 rejected = []
1428 rejected = []
1422 for f in files:
1429 for f in files:
1423 if f not in self._repo.dirstate:
1430 if f not in self._repo.dirstate:
1424 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1431 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1425 rejected.append(f)
1432 rejected.append(f)
1426 elif self._repo.dirstate[f] != 'a':
1433 elif self._repo.dirstate[f] != 'a':
1427 self._repo.dirstate.remove(f)
1434 self._repo.dirstate.remove(f)
1428 else:
1435 else:
1429 self._repo.dirstate.drop(f)
1436 self._repo.dirstate.drop(f)
1430 return rejected
1437 return rejected
1431 finally:
1438 finally:
1432 wlock.release()
1439 wlock.release()
1433
1440
1434 def undelete(self, list):
1441 def undelete(self, list):
1435 pctxs = self.parents()
1442 pctxs = self.parents()
1436 wlock = self._repo.wlock()
1443 wlock = self._repo.wlock()
1437 try:
1444 try:
1438 for f in list:
1445 for f in list:
1439 if self._repo.dirstate[f] != 'r':
1446 if self._repo.dirstate[f] != 'r':
1440 self._repo.ui.warn(_("%s not removed!\n") % f)
1447 self._repo.ui.warn(_("%s not removed!\n") % f)
1441 else:
1448 else:
1442 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1449 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1443 t = fctx.data()
1450 t = fctx.data()
1444 self._repo.wwrite(f, t, fctx.flags())
1451 self._repo.wwrite(f, t, fctx.flags())
1445 self._repo.dirstate.normal(f)
1452 self._repo.dirstate.normal(f)
1446 finally:
1453 finally:
1447 wlock.release()
1454 wlock.release()
1448
1455
1449 def copy(self, source, dest):
1456 def copy(self, source, dest):
1450 try:
1457 try:
1451 st = self._repo.wvfs.lstat(dest)
1458 st = self._repo.wvfs.lstat(dest)
1452 except OSError as err:
1459 except OSError as err:
1453 if err.errno != errno.ENOENT:
1460 if err.errno != errno.ENOENT:
1454 raise
1461 raise
1455 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1462 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1456 return
1463 return
1457 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1464 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1458 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1465 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1459 "symbolic link\n") % dest)
1466 "symbolic link\n") % dest)
1460 else:
1467 else:
1461 wlock = self._repo.wlock()
1468 wlock = self._repo.wlock()
1462 try:
1469 try:
1463 if self._repo.dirstate[dest] in '?':
1470 if self._repo.dirstate[dest] in '?':
1464 self._repo.dirstate.add(dest)
1471 self._repo.dirstate.add(dest)
1465 elif self._repo.dirstate[dest] in 'r':
1472 elif self._repo.dirstate[dest] in 'r':
1466 self._repo.dirstate.normallookup(dest)
1473 self._repo.dirstate.normallookup(dest)
1467 self._repo.dirstate.copy(source, dest)
1474 self._repo.dirstate.copy(source, dest)
1468 finally:
1475 finally:
1469 wlock.release()
1476 wlock.release()
1470
1477
1471 def match(self, pats=[], include=None, exclude=None, default='glob',
1478 def match(self, pats=[], include=None, exclude=None, default='glob',
1472 listsubrepos=False, badfn=None):
1479 listsubrepos=False, badfn=None):
1473 r = self._repo
1480 r = self._repo
1474
1481
1475 # Only a case insensitive filesystem needs magic to translate user input
1482 # Only a case insensitive filesystem needs magic to translate user input
1476 # to actual case in the filesystem.
1483 # to actual case in the filesystem.
1477 if not util.checkcase(r.root):
1484 if not util.checkcase(r.root):
1478 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1485 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1479 exclude, default, r.auditor, self,
1486 exclude, default, r.auditor, self,
1480 listsubrepos=listsubrepos,
1487 listsubrepos=listsubrepos,
1481 badfn=badfn)
1488 badfn=badfn)
1482 return matchmod.match(r.root, r.getcwd(), pats,
1489 return matchmod.match(r.root, r.getcwd(), pats,
1483 include, exclude, default,
1490 include, exclude, default,
1484 auditor=r.auditor, ctx=self,
1491 auditor=r.auditor, ctx=self,
1485 listsubrepos=listsubrepos, badfn=badfn)
1492 listsubrepos=listsubrepos, badfn=badfn)
1486
1493
1487 def _filtersuspectsymlink(self, files):
1494 def _filtersuspectsymlink(self, files):
1488 if not files or self._repo.dirstate._checklink:
1495 if not files or self._repo.dirstate._checklink:
1489 return files
1496 return files
1490
1497
1491 # Symlink placeholders may get non-symlink-like contents
1498 # Symlink placeholders may get non-symlink-like contents
1492 # via user error or dereferencing by NFS or Samba servers,
1499 # via user error or dereferencing by NFS or Samba servers,
1493 # so we filter out any placeholders that don't look like a
1500 # so we filter out any placeholders that don't look like a
1494 # symlink
1501 # symlink
1495 sane = []
1502 sane = []
1496 for f in files:
1503 for f in files:
1497 if self.flags(f) == 'l':
1504 if self.flags(f) == 'l':
1498 d = self[f].data()
1505 d = self[f].data()
1499 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1506 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1500 self._repo.ui.debug('ignoring suspect symlink placeholder'
1507 self._repo.ui.debug('ignoring suspect symlink placeholder'
1501 ' "%s"\n' % f)
1508 ' "%s"\n' % f)
1502 continue
1509 continue
1503 sane.append(f)
1510 sane.append(f)
1504 return sane
1511 return sane
1505
1512
1506 def _checklookup(self, files):
1513 def _checklookup(self, files):
1507 # check for any possibly clean files
1514 # check for any possibly clean files
1508 if not files:
1515 if not files:
1509 return [], []
1516 return [], []
1510
1517
1511 modified = []
1518 modified = []
1512 fixup = []
1519 fixup = []
1513 pctx = self._parents[0]
1520 pctx = self._parents[0]
1514 # do a full compare of any files that might have changed
1521 # do a full compare of any files that might have changed
1515 for f in sorted(files):
1522 for f in sorted(files):
1516 if (f not in pctx or self.flags(f) != pctx.flags(f)
1523 if (f not in pctx or self.flags(f) != pctx.flags(f)
1517 or pctx[f].cmp(self[f])):
1524 or pctx[f].cmp(self[f])):
1518 modified.append(f)
1525 modified.append(f)
1519 else:
1526 else:
1520 fixup.append(f)
1527 fixup.append(f)
1521
1528
1522 # update dirstate for files that are actually clean
1529 # update dirstate for files that are actually clean
1523 if fixup:
1530 if fixup:
1524 try:
1531 try:
1525 # updating the dirstate is optional
1532 # updating the dirstate is optional
1526 # so we don't wait on the lock
1533 # so we don't wait on the lock
1527 # wlock can invalidate the dirstate, so cache normal _after_
1534 # wlock can invalidate the dirstate, so cache normal _after_
1528 # taking the lock
1535 # taking the lock
1529 wlock = self._repo.wlock(False)
1536 wlock = self._repo.wlock(False)
1530 normal = self._repo.dirstate.normal
1537 normal = self._repo.dirstate.normal
1531 try:
1538 try:
1532 for f in fixup:
1539 for f in fixup:
1533 normal(f)
1540 normal(f)
1534 # write changes out explicitly, because nesting
1541 # write changes out explicitly, because nesting
1535 # wlock at runtime may prevent 'wlock.release()'
1542 # wlock at runtime may prevent 'wlock.release()'
1536 # below from doing so for subsequent changing files
1543 # below from doing so for subsequent changing files
1537 self._repo.dirstate.write(self._repo.currenttransaction())
1544 self._repo.dirstate.write(self._repo.currenttransaction())
1538 finally:
1545 finally:
1539 wlock.release()
1546 wlock.release()
1540 except error.LockError:
1547 except error.LockError:
1541 pass
1548 pass
1542 return modified, fixup
1549 return modified, fixup
1543
1550
1544 def _manifestmatches(self, match, s):
1551 def _manifestmatches(self, match, s):
1545 """Slow path for workingctx
1552 """Slow path for workingctx
1546
1553
1547 The fast path is when we compare the working directory to its parent
1554 The fast path is when we compare the working directory to its parent
1548 which means this function is comparing with a non-parent; therefore we
1555 which means this function is comparing with a non-parent; therefore we
1549 need to build a manifest and return what matches.
1556 need to build a manifest and return what matches.
1550 """
1557 """
1551 mf = self._repo['.']._manifestmatches(match, s)
1558 mf = self._repo['.']._manifestmatches(match, s)
1552 for f in s.modified + s.added:
1559 for f in s.modified + s.added:
1553 mf[f] = _newnode
1560 mf[f] = _newnode
1554 mf.setflag(f, self.flags(f))
1561 mf.setflag(f, self.flags(f))
1555 for f in s.removed:
1562 for f in s.removed:
1556 if f in mf:
1563 if f in mf:
1557 del mf[f]
1564 del mf[f]
1558 return mf
1565 return mf
1559
1566
1560 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1567 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1561 unknown=False):
1568 unknown=False):
1562 '''Gets the status from the dirstate -- internal use only.'''
1569 '''Gets the status from the dirstate -- internal use only.'''
1563 listignored, listclean, listunknown = ignored, clean, unknown
1570 listignored, listclean, listunknown = ignored, clean, unknown
1564 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1571 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1565 subrepos = []
1572 subrepos = []
1566 if '.hgsub' in self:
1573 if '.hgsub' in self:
1567 subrepos = sorted(self.substate)
1574 subrepos = sorted(self.substate)
1568 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1575 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1569 listclean, listunknown)
1576 listclean, listunknown)
1570
1577
1571 # check for any possibly clean files
1578 # check for any possibly clean files
1572 if cmp:
1579 if cmp:
1573 modified2, fixup = self._checklookup(cmp)
1580 modified2, fixup = self._checklookup(cmp)
1574 s.modified.extend(modified2)
1581 s.modified.extend(modified2)
1575
1582
1576 # update dirstate for files that are actually clean
1583 # update dirstate for files that are actually clean
1577 if fixup and listclean:
1584 if fixup and listclean:
1578 s.clean.extend(fixup)
1585 s.clean.extend(fixup)
1579
1586
1580 if match.always():
1587 if match.always():
1581 # cache for performance
1588 # cache for performance
1582 if s.unknown or s.ignored or s.clean:
1589 if s.unknown or s.ignored or s.clean:
1583 # "_status" is cached with list*=False in the normal route
1590 # "_status" is cached with list*=False in the normal route
1584 self._status = scmutil.status(s.modified, s.added, s.removed,
1591 self._status = scmutil.status(s.modified, s.added, s.removed,
1585 s.deleted, [], [], [])
1592 s.deleted, [], [], [])
1586 else:
1593 else:
1587 self._status = s
1594 self._status = s
1588
1595
1589 return s
1596 return s
1590
1597
1591 def _buildstatus(self, other, s, match, listignored, listclean,
1598 def _buildstatus(self, other, s, match, listignored, listclean,
1592 listunknown):
1599 listunknown):
1593 """build a status with respect to another context
1600 """build a status with respect to another context
1594
1601
1595 This includes logic for maintaining the fast path of status when
1602 This includes logic for maintaining the fast path of status when
1596 comparing the working directory against its parent, which is to skip
1603 comparing the working directory against its parent, which is to skip
1597 building a new manifest if self (working directory) is not comparing
1604 building a new manifest if self (working directory) is not comparing
1598 against its parent (repo['.']).
1605 against its parent (repo['.']).
1599 """
1606 """
1600 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1607 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1601 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1608 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1602 # might have accidentally ended up with the entire contents of the file
1609 # might have accidentally ended up with the entire contents of the file
1603 # they are supposed to be linking to.
1610 # they are supposed to be linking to.
1604 s.modified[:] = self._filtersuspectsymlink(s.modified)
1611 s.modified[:] = self._filtersuspectsymlink(s.modified)
1605 if other != self._repo['.']:
1612 if other != self._repo['.']:
1606 s = super(workingctx, self)._buildstatus(other, s, match,
1613 s = super(workingctx, self)._buildstatus(other, s, match,
1607 listignored, listclean,
1614 listignored, listclean,
1608 listunknown)
1615 listunknown)
1609 return s
1616 return s
1610
1617
1611 def _matchstatus(self, other, match):
1618 def _matchstatus(self, other, match):
1612 """override the match method with a filter for directory patterns
1619 """override the match method with a filter for directory patterns
1613
1620
1614 We use inheritance to customize the match.bad method only in cases of
1621 We use inheritance to customize the match.bad method only in cases of
1615 workingctx since it belongs only to the working directory when
1622 workingctx since it belongs only to the working directory when
1616 comparing against the parent changeset.
1623 comparing against the parent changeset.
1617
1624
1618 If we aren't comparing against the working directory's parent, then we
1625 If we aren't comparing against the working directory's parent, then we
1619 just use the default match object sent to us.
1626 just use the default match object sent to us.
1620 """
1627 """
1621 superself = super(workingctx, self)
1628 superself = super(workingctx, self)
1622 match = superself._matchstatus(other, match)
1629 match = superself._matchstatus(other, match)
1623 if other != self._repo['.']:
1630 if other != self._repo['.']:
1624 def bad(f, msg):
1631 def bad(f, msg):
1625 # 'f' may be a directory pattern from 'match.files()',
1632 # 'f' may be a directory pattern from 'match.files()',
1626 # so 'f not in ctx1' is not enough
1633 # so 'f not in ctx1' is not enough
1627 if f not in other and not other.hasdir(f):
1634 if f not in other and not other.hasdir(f):
1628 self._repo.ui.warn('%s: %s\n' %
1635 self._repo.ui.warn('%s: %s\n' %
1629 (self._repo.dirstate.pathto(f), msg))
1636 (self._repo.dirstate.pathto(f), msg))
1630 match.bad = bad
1637 match.bad = bad
1631 return match
1638 return match
1632
1639
1633 class committablefilectx(basefilectx):
1640 class committablefilectx(basefilectx):
1634 """A committablefilectx provides common functionality for a file context
1641 """A committablefilectx provides common functionality for a file context
1635 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1642 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1636 def __init__(self, repo, path, filelog=None, ctx=None):
1643 def __init__(self, repo, path, filelog=None, ctx=None):
1637 self._repo = repo
1644 self._repo = repo
1638 self._path = path
1645 self._path = path
1639 self._changeid = None
1646 self._changeid = None
1640 self._filerev = self._filenode = None
1647 self._filerev = self._filenode = None
1641
1648
1642 if filelog is not None:
1649 if filelog is not None:
1643 self._filelog = filelog
1650 self._filelog = filelog
1644 if ctx:
1651 if ctx:
1645 self._changectx = ctx
1652 self._changectx = ctx
1646
1653
1647 def __nonzero__(self):
1654 def __nonzero__(self):
1648 return True
1655 return True
1649
1656
1650 def linkrev(self):
1657 def linkrev(self):
1651 # linked to self._changectx no matter if file is modified or not
1658 # linked to self._changectx no matter if file is modified or not
1652 return self.rev()
1659 return self.rev()
1653
1660
1654 def parents(self):
1661 def parents(self):
1655 '''return parent filectxs, following copies if necessary'''
1662 '''return parent filectxs, following copies if necessary'''
1656 def filenode(ctx, path):
1663 def filenode(ctx, path):
1657 return ctx._manifest.get(path, nullid)
1664 return ctx._manifest.get(path, nullid)
1658
1665
1659 path = self._path
1666 path = self._path
1660 fl = self._filelog
1667 fl = self._filelog
1661 pcl = self._changectx._parents
1668 pcl = self._changectx._parents
1662 renamed = self.renamed()
1669 renamed = self.renamed()
1663
1670
1664 if renamed:
1671 if renamed:
1665 pl = [renamed + (None,)]
1672 pl = [renamed + (None,)]
1666 else:
1673 else:
1667 pl = [(path, filenode(pcl[0], path), fl)]
1674 pl = [(path, filenode(pcl[0], path), fl)]
1668
1675
1669 for pc in pcl[1:]:
1676 for pc in pcl[1:]:
1670 pl.append((path, filenode(pc, path), fl))
1677 pl.append((path, filenode(pc, path), fl))
1671
1678
1672 return [self._parentfilectx(p, fileid=n, filelog=l)
1679 return [self._parentfilectx(p, fileid=n, filelog=l)
1673 for p, n, l in pl if n != nullid]
1680 for p, n, l in pl if n != nullid]
1674
1681
1675 def children(self):
1682 def children(self):
1676 return []
1683 return []
1677
1684
1678 class workingfilectx(committablefilectx):
1685 class workingfilectx(committablefilectx):
1679 """A workingfilectx object makes access to data related to a particular
1686 """A workingfilectx object makes access to data related to a particular
1680 file in the working directory convenient."""
1687 file in the working directory convenient."""
1681 def __init__(self, repo, path, filelog=None, workingctx=None):
1688 def __init__(self, repo, path, filelog=None, workingctx=None):
1682 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1689 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1683
1690
1684 @propertycache
1691 @propertycache
1685 def _changectx(self):
1692 def _changectx(self):
1686 return workingctx(self._repo)
1693 return workingctx(self._repo)
1687
1694
1688 def data(self):
1695 def data(self):
1689 return self._repo.wread(self._path)
1696 return self._repo.wread(self._path)
1690 def renamed(self):
1697 def renamed(self):
1691 rp = self._repo.dirstate.copied(self._path)
1698 rp = self._repo.dirstate.copied(self._path)
1692 if not rp:
1699 if not rp:
1693 return None
1700 return None
1694 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1701 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1695
1702
1696 def size(self):
1703 def size(self):
1697 return self._repo.wvfs.lstat(self._path).st_size
1704 return self._repo.wvfs.lstat(self._path).st_size
1698 def date(self):
1705 def date(self):
1699 t, tz = self._changectx.date()
1706 t, tz = self._changectx.date()
1700 try:
1707 try:
1701 return (util.statmtimesec(self._repo.wvfs.lstat(self._path)), tz)
1708 return (util.statmtimesec(self._repo.wvfs.lstat(self._path)), tz)
1702 except OSError as err:
1709 except OSError as err:
1703 if err.errno != errno.ENOENT:
1710 if err.errno != errno.ENOENT:
1704 raise
1711 raise
1705 return (t, tz)
1712 return (t, tz)
1706
1713
1707 def cmp(self, fctx):
1714 def cmp(self, fctx):
1708 """compare with other file context
1715 """compare with other file context
1709
1716
1710 returns True if different than fctx.
1717 returns True if different than fctx.
1711 """
1718 """
1712 # fctx should be a filectx (not a workingfilectx)
1719 # fctx should be a filectx (not a workingfilectx)
1713 # invert comparison to reuse the same code path
1720 # invert comparison to reuse the same code path
1714 return fctx.cmp(self)
1721 return fctx.cmp(self)
1715
1722
1716 def remove(self, ignoremissing=False):
1723 def remove(self, ignoremissing=False):
1717 """wraps unlink for a repo's working directory"""
1724 """wraps unlink for a repo's working directory"""
1718 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1725 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1719
1726
1720 def write(self, data, flags):
1727 def write(self, data, flags):
1721 """wraps repo.wwrite"""
1728 """wraps repo.wwrite"""
1722 self._repo.wwrite(self._path, data, flags)
1729 self._repo.wwrite(self._path, data, flags)
1723
1730
1724 class workingcommitctx(workingctx):
1731 class workingcommitctx(workingctx):
1725 """A workingcommitctx object makes access to data related to
1732 """A workingcommitctx object makes access to data related to
1726 the revision being committed convenient.
1733 the revision being committed convenient.
1727
1734
1728 This hides changes in the working directory, if they aren't
1735 This hides changes in the working directory, if they aren't
1729 committed in this context.
1736 committed in this context.
1730 """
1737 """
1731 def __init__(self, repo, changes,
1738 def __init__(self, repo, changes,
1732 text="", user=None, date=None, extra=None):
1739 text="", user=None, date=None, extra=None):
1733 super(workingctx, self).__init__(repo, text, user, date, extra,
1740 super(workingctx, self).__init__(repo, text, user, date, extra,
1734 changes)
1741 changes)
1735
1742
1736 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1743 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1737 unknown=False):
1744 unknown=False):
1738 """Return matched files only in ``self._status``
1745 """Return matched files only in ``self._status``
1739
1746
1740 Uncommitted files appear "clean" via this context, even if
1747 Uncommitted files appear "clean" via this context, even if
1741 they aren't actually so in the working directory.
1748 they aren't actually so in the working directory.
1742 """
1749 """
1743 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1750 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1744 if clean:
1751 if clean:
1745 clean = [f for f in self._manifest if f not in self._changedset]
1752 clean = [f for f in self._manifest if f not in self._changedset]
1746 else:
1753 else:
1747 clean = []
1754 clean = []
1748 return scmutil.status([f for f in self._status.modified if match(f)],
1755 return scmutil.status([f for f in self._status.modified if match(f)],
1749 [f for f in self._status.added if match(f)],
1756 [f for f in self._status.added if match(f)],
1750 [f for f in self._status.removed if match(f)],
1757 [f for f in self._status.removed if match(f)],
1751 [], [], [], clean)
1758 [], [], [], clean)
1752
1759
1753 @propertycache
1760 @propertycache
1754 def _changedset(self):
1761 def _changedset(self):
1755 """Return the set of files changed in this context
1762 """Return the set of files changed in this context
1756 """
1763 """
1757 changed = set(self._status.modified)
1764 changed = set(self._status.modified)
1758 changed.update(self._status.added)
1765 changed.update(self._status.added)
1759 changed.update(self._status.removed)
1766 changed.update(self._status.removed)
1760 return changed
1767 return changed
1761
1768
1762 class memctx(committablectx):
1769 class memctx(committablectx):
1763 """Use memctx to perform in-memory commits via localrepo.commitctx().
1770 """Use memctx to perform in-memory commits via localrepo.commitctx().
1764
1771
1765 Revision information is supplied at initialization time while
1772 Revision information is supplied at initialization time while
1766 related files data and is made available through a callback
1773 related files data and is made available through a callback
1767 mechanism. 'repo' is the current localrepo, 'parents' is a
1774 mechanism. 'repo' is the current localrepo, 'parents' is a
1768 sequence of two parent revisions identifiers (pass None for every
1775 sequence of two parent revisions identifiers (pass None for every
1769 missing parent), 'text' is the commit message and 'files' lists
1776 missing parent), 'text' is the commit message and 'files' lists
1770 names of files touched by the revision (normalized and relative to
1777 names of files touched by the revision (normalized and relative to
1771 repository root).
1778 repository root).
1772
1779
1773 filectxfn(repo, memctx, path) is a callable receiving the
1780 filectxfn(repo, memctx, path) is a callable receiving the
1774 repository, the current memctx object and the normalized path of
1781 repository, the current memctx object and the normalized path of
1775 requested file, relative to repository root. It is fired by the
1782 requested file, relative to repository root. It is fired by the
1776 commit function for every file in 'files', but calls order is
1783 commit function for every file in 'files', but calls order is
1777 undefined. If the file is available in the revision being
1784 undefined. If the file is available in the revision being
1778 committed (updated or added), filectxfn returns a memfilectx
1785 committed (updated or added), filectxfn returns a memfilectx
1779 object. If the file was removed, filectxfn raises an
1786 object. If the file was removed, filectxfn raises an
1780 IOError. Moved files are represented by marking the source file
1787 IOError. Moved files are represented by marking the source file
1781 removed and the new file added with copy information (see
1788 removed and the new file added with copy information (see
1782 memfilectx).
1789 memfilectx).
1783
1790
1784 user receives the committer name and defaults to current
1791 user receives the committer name and defaults to current
1785 repository username, date is the commit date in any format
1792 repository username, date is the commit date in any format
1786 supported by util.parsedate() and defaults to current date, extra
1793 supported by util.parsedate() and defaults to current date, extra
1787 is a dictionary of metadata or is left empty.
1794 is a dictionary of metadata or is left empty.
1788 """
1795 """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True
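    # For instance, an extension that must support both old and new Mercurial
    # versions could branch on this flag inside its filectxfn (hypothetical
    # sketch; 'removedfiles' and 'makememfilectx' are made-up names):
    #
    #     def filectxfn(repo, memctx, path):
    #         if path in removedfiles:
    #             if getattr(memctx, '_returnnoneformissingfiles', False):
    #                 return None    # Mercurial > 3.1
    #             raise IOError      # Mercurial <= 3.1
    #         return makememfilectx(repo, memctx, path)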

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if filectxfn is not callable (e.g. a dict mapping paths to
        # filectx-like objects), wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # "util.cachefunc" reduces invocation of possibly expensive
            # "filectxfn" for performance (e.g. converting from another VCS)
            self._filectxfn = util.cachefunc(filectxfn)

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].node()
            if len(p) > 1:
                p2node = p[1].node()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized as a list of exactly two elements.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if the current file was copied in
        the revision being committed, or None."""
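        # For example, recording a copy might look like this (hypothetical
        # caller-side sketch; 'mctx' stands for an enclosing memctx):
        #
        #     memfilectx(repo, 'dst/name.txt', data,
        #                copied='src/name.txt', memctx=mctx)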
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

    def data(self):
        return self._data
    def size(self):
        return len(self.data())
    def flags(self):
        return self._flags
    def renamed(self):
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data