patch: add 'extra' argument to makememctx...
Laurent Charignon
r25303:b7876b8f default
@@ -1,1909 +1,1910 @@
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import copy, os, errno, stat
12 import copy, os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16 import revlog
16 import revlog
17
17
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 # Phony node value to stand in for new files in some uses of
20 # Phony node value to stand in for new files in some uses of
21 # manifests. Manifests support 21-byte hashes for nodes which are
21 # manifests. Manifests support 21-byte hashes for nodes which are
22 # dirty in the working copy.
22 # dirty in the working copy.
23 _newnode = '!' * 21
23 _newnode = '!' * 21
24
24
25 class basectx(object):
25 class basectx(object):
26 """A basectx object represents the common logic for its children:
26 """A basectx object represents the common logic for its children:
27 changectx: read-only context that is already present in the repo,
27 changectx: read-only context that is already present in the repo,
28 workingctx: a context that represents the working directory and can
28 workingctx: a context that represents the working directory and can
29 be committed,
29 be committed,
30 memctx: a context that represents changes in-memory and can also
30 memctx: a context that represents changes in-memory and can also
31 be committed."""
31 be committed."""
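# Editorial note (not part of the original file): in practice these
# subclasses are usually reached through repository indexing, e.g.
#   repo['tip'] or repo[node]  -> changectx   (committed, read-only revision)
#   repo[None]                 -> workingctx  (the working directory)
#   memctx via makememctx(...) -> in-memory changes (see makememctx below)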
32 def __new__(cls, repo, changeid='', *args, **kwargs):
32 def __new__(cls, repo, changeid='', *args, **kwargs):
33 if isinstance(changeid, basectx):
33 if isinstance(changeid, basectx):
34 return changeid
34 return changeid
35
35
36 o = super(basectx, cls).__new__(cls)
36 o = super(basectx, cls).__new__(cls)
37
37
38 o._repo = repo
38 o._repo = repo
39 o._rev = nullrev
39 o._rev = nullrev
40 o._node = nullid
40 o._node = nullid
41
41
42 return o
42 return o
43
43
44 def __str__(self):
44 def __str__(self):
45 return short(self.node())
45 return short(self.node())
46
46
47 def __int__(self):
47 def __int__(self):
48 return self.rev()
48 return self.rev()
49
49
50 def __repr__(self):
50 def __repr__(self):
51 return "<%s %s>" % (type(self).__name__, str(self))
51 return "<%s %s>" % (type(self).__name__, str(self))
52
52
53 def __eq__(self, other):
53 def __eq__(self, other):
54 try:
54 try:
55 return type(self) == type(other) and self._rev == other._rev
55 return type(self) == type(other) and self._rev == other._rev
56 except AttributeError:
56 except AttributeError:
57 return False
57 return False
58
58
59 def __ne__(self, other):
59 def __ne__(self, other):
60 return not (self == other)
60 return not (self == other)
61
61
62 def __contains__(self, key):
62 def __contains__(self, key):
63 return key in self._manifest
63 return key in self._manifest
64
64
65 def __getitem__(self, key):
65 def __getitem__(self, key):
66 return self.filectx(key)
66 return self.filectx(key)
67
67
68 def __iter__(self):
68 def __iter__(self):
69 return iter(self._manifest)
69 return iter(self._manifest)
70
70
71 def _manifestmatches(self, match, s):
71 def _manifestmatches(self, match, s):
72 """generate a new manifest filtered by the match argument
72 """generate a new manifest filtered by the match argument
73
73
74 This method is for internal use only and mainly exists to provide an
74 This method is for internal use only and mainly exists to provide an
75 object oriented way for other contexts to customize the manifest
75 object oriented way for other contexts to customize the manifest
76 generation.
76 generation.
77 """
77 """
78 return self.manifest().matches(match)
78 return self.manifest().matches(match)
79
79
80 def _matchstatus(self, other, match):
80 def _matchstatus(self, other, match):
81 """return match.always if match is none
81 """return match.always if match is none
82
82
83 This internal method provides a way for child objects to override the
83 This internal method provides a way for child objects to override the
84 match operator.
84 match operator.
85 """
85 """
86 return match or matchmod.always(self._repo.root, self._repo.getcwd())
86 return match or matchmod.always(self._repo.root, self._repo.getcwd())
87
87
88 def _buildstatus(self, other, s, match, listignored, listclean,
88 def _buildstatus(self, other, s, match, listignored, listclean,
89 listunknown):
89 listunknown):
90 """build a status with respect to another context"""
90 """build a status with respect to another context"""
91 # Load earliest manifest first for caching reasons. More specifically,
91 # Load earliest manifest first for caching reasons. More specifically,
92 # if you have revisions 1000 and 1001, 1001 is probably stored as a
92 # if you have revisions 1000 and 1001, 1001 is probably stored as a
93 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
93 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
94 # 1000 and cache it so that when you read 1001, we just need to apply a
94 # 1000 and cache it so that when you read 1001, we just need to apply a
95 # delta to what's in the cache. So that's one full reconstruction + one
95 # delta to what's in the cache. So that's one full reconstruction + one
96 # delta application.
96 # delta application.
97 if self.rev() is not None and self.rev() < other.rev():
97 if self.rev() is not None and self.rev() < other.rev():
98 self.manifest()
98 self.manifest()
99 mf1 = other._manifestmatches(match, s)
99 mf1 = other._manifestmatches(match, s)
100 mf2 = self._manifestmatches(match, s)
100 mf2 = self._manifestmatches(match, s)
101
101
102 modified, added = [], []
102 modified, added = [], []
103 removed = []
103 removed = []
104 clean = []
104 clean = []
105 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
105 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
106 deletedset = set(deleted)
106 deletedset = set(deleted)
107 d = mf1.diff(mf2, clean=listclean)
107 d = mf1.diff(mf2, clean=listclean)
108 for fn, value in d.iteritems():
108 for fn, value in d.iteritems():
109 if fn in deletedset:
109 if fn in deletedset:
110 continue
110 continue
111 if value is None:
111 if value is None:
112 clean.append(fn)
112 clean.append(fn)
113 continue
113 continue
114 (node1, flag1), (node2, flag2) = value
114 (node1, flag1), (node2, flag2) = value
115 if node1 is None:
115 if node1 is None:
116 added.append(fn)
116 added.append(fn)
117 elif node2 is None:
117 elif node2 is None:
118 removed.append(fn)
118 removed.append(fn)
119 elif node2 != _newnode:
119 elif node2 != _newnode:
120 # The file was not a new file in mf2, so an entry
120 # The file was not a new file in mf2, so an entry
121 # from diff is really a difference.
121 # from diff is really a difference.
122 modified.append(fn)
122 modified.append(fn)
123 elif self[fn].cmp(other[fn]):
123 elif self[fn].cmp(other[fn]):
124 # node2 was newnode, but the working file doesn't
124 # node2 was newnode, but the working file doesn't
125 # match the one in mf1.
125 # match the one in mf1.
126 modified.append(fn)
126 modified.append(fn)
127 else:
127 else:
128 clean.append(fn)
128 clean.append(fn)
129
129
130 if removed:
130 if removed:
131 # need to filter files if they are already reported as removed
131 # need to filter files if they are already reported as removed
132 unknown = [fn for fn in unknown if fn not in mf1]
132 unknown = [fn for fn in unknown if fn not in mf1]
133 ignored = [fn for fn in ignored if fn not in mf1]
133 ignored = [fn for fn in ignored if fn not in mf1]
134 # if they're deleted, don't report them as removed
134 # if they're deleted, don't report them as removed
135 removed = [fn for fn in removed if fn not in deletedset]
135 removed = [fn for fn in removed if fn not in deletedset]
136
136
137 return scmutil.status(modified, added, removed, deleted, unknown,
137 return scmutil.status(modified, added, removed, deleted, unknown,
138 ignored, clean)
138 ignored, clean)
139
139
140 @propertycache
140 @propertycache
141 def substate(self):
141 def substate(self):
142 return subrepo.state(self, self._repo.ui)
142 return subrepo.state(self, self._repo.ui)
143
143
144 def subrev(self, subpath):
144 def subrev(self, subpath):
145 return self.substate[subpath][1]
145 return self.substate[subpath][1]
146
146
147 def rev(self):
147 def rev(self):
148 return self._rev
148 return self._rev
149 def node(self):
149 def node(self):
150 return self._node
150 return self._node
151 def hex(self):
151 def hex(self):
152 return hex(self.node())
152 return hex(self.node())
153 def manifest(self):
153 def manifest(self):
154 return self._manifest
154 return self._manifest
155 def repo(self):
155 def repo(self):
156 return self._repo
156 return self._repo
157 def phasestr(self):
157 def phasestr(self):
158 return phases.phasenames[self.phase()]
158 return phases.phasenames[self.phase()]
159 def mutable(self):
159 def mutable(self):
160 return self.phase() > phases.public
160 return self.phase() > phases.public
161
161
162 def getfileset(self, expr):
162 def getfileset(self, expr):
163 return fileset.getfileset(self, expr)
163 return fileset.getfileset(self, expr)
164
164
165 def obsolete(self):
165 def obsolete(self):
166 """True if the changeset is obsolete"""
166 """True if the changeset is obsolete"""
167 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
167 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
168
168
169 def extinct(self):
169 def extinct(self):
170 """True if the changeset is extinct"""
170 """True if the changeset is extinct"""
171 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
171 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
172
172
173 def unstable(self):
173 def unstable(self):
174 """True if the changeset is not obsolete but it's ancestor are"""
174 """True if the changeset is not obsolete but it's ancestor are"""
175 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
175 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
176
176
177 def bumped(self):
177 def bumped(self):
178 """True if the changeset try to be a successor of a public changeset
178 """True if the changeset try to be a successor of a public changeset
179
179
180 Only non-public and non-obsolete changesets may be bumped.
180 Only non-public and non-obsolete changesets may be bumped.
181 """
181 """
182 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
182 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
183
183
184 def divergent(self):
184 def divergent(self):
185 """Is a successors of a changeset with multiple possible successors set
185 """Is a successors of a changeset with multiple possible successors set
186
186
187 Only non-public and non-obsolete changesets may be divergent.
187 Only non-public and non-obsolete changesets may be divergent.
188 """
188 """
189 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
189 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
190
190
191 def troubled(self):
191 def troubled(self):
192 """True if the changeset is either unstable, bumped or divergent"""
192 """True if the changeset is either unstable, bumped or divergent"""
193 return self.unstable() or self.bumped() or self.divergent()
193 return self.unstable() or self.bumped() or self.divergent()
194
194
195 def troubles(self):
195 def troubles(self):
196 """return the list of troubles affecting this changesets.
196 """return the list of troubles affecting this changesets.
197
197
198 Troubles are returned as strings. possible values are:
198 Troubles are returned as strings. possible values are:
199 - unstable,
199 - unstable,
200 - bumped,
200 - bumped,
201 - divergent.
201 - divergent.
202 """
202 """
203 troubles = []
203 troubles = []
204 if self.unstable():
204 if self.unstable():
205 troubles.append('unstable')
205 troubles.append('unstable')
206 if self.bumped():
206 if self.bumped():
207 troubles.append('bumped')
207 troubles.append('bumped')
208 if self.divergent():
208 if self.divergent():
209 troubles.append('divergent')
209 troubles.append('divergent')
210 return troubles
210 return troubles
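# Hedged usage sketch (editorial, not part of the original file): assuming
# 'repo' is an existing localrepository, evolution troubles for a revision
# could be reported along these lines.
ctx = repo['tip']
if ctx.troubled():
    repo.ui.write('%s: %s\n' % (ctx, ', '.join(ctx.troubles())))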
211
211
212 def parents(self):
212 def parents(self):
213 """return contexts for each parent changeset"""
213 """return contexts for each parent changeset"""
214 return self._parents
214 return self._parents
215
215
216 def p1(self):
216 def p1(self):
217 return self._parents[0]
217 return self._parents[0]
218
218
219 def p2(self):
219 def p2(self):
220 if len(self._parents) == 2:
220 if len(self._parents) == 2:
221 return self._parents[1]
221 return self._parents[1]
222 return changectx(self._repo, -1)
222 return changectx(self._repo, -1)
223
223
224 def _fileinfo(self, path):
224 def _fileinfo(self, path):
225 if '_manifest' in self.__dict__:
225 if '_manifest' in self.__dict__:
226 try:
226 try:
227 return self._manifest[path], self._manifest.flags(path)
227 return self._manifest[path], self._manifest.flags(path)
228 except KeyError:
228 except KeyError:
229 raise error.ManifestLookupError(self._node, path,
229 raise error.ManifestLookupError(self._node, path,
230 _('not found in manifest'))
230 _('not found in manifest'))
231 if '_manifestdelta' in self.__dict__ or path in self.files():
231 if '_manifestdelta' in self.__dict__ or path in self.files():
232 if path in self._manifestdelta:
232 if path in self._manifestdelta:
233 return (self._manifestdelta[path],
233 return (self._manifestdelta[path],
234 self._manifestdelta.flags(path))
234 self._manifestdelta.flags(path))
235 node, flag = self._repo.manifest.find(self._changeset[0], path)
235 node, flag = self._repo.manifest.find(self._changeset[0], path)
236 if not node:
236 if not node:
237 raise error.ManifestLookupError(self._node, path,
237 raise error.ManifestLookupError(self._node, path,
238 _('not found in manifest'))
238 _('not found in manifest'))
239
239
240 return node, flag
240 return node, flag
241
241
242 def filenode(self, path):
242 def filenode(self, path):
243 return self._fileinfo(path)[0]
243 return self._fileinfo(path)[0]
244
244
245 def flags(self, path):
245 def flags(self, path):
246 try:
246 try:
247 return self._fileinfo(path)[1]
247 return self._fileinfo(path)[1]
248 except error.LookupError:
248 except error.LookupError:
249 return ''
249 return ''
250
250
251 def sub(self, path):
251 def sub(self, path):
252 return subrepo.subrepo(self, path)
252 return subrepo.subrepo(self, path)
253
253
254 def match(self, pats=[], include=None, exclude=None, default='glob',
254 def match(self, pats=[], include=None, exclude=None, default='glob',
255 listsubrepos=False):
255 listsubrepos=False):
256 r = self._repo
256 r = self._repo
257 return matchmod.match(r.root, r.getcwd(), pats,
257 return matchmod.match(r.root, r.getcwd(), pats,
258 include, exclude, default,
258 include, exclude, default,
259 auditor=r.auditor, ctx=self,
259 auditor=r.auditor, ctx=self,
260 listsubrepos=listsubrepos)
260 listsubrepos=listsubrepos)
261
261
262 def diff(self, ctx2=None, match=None, **opts):
262 def diff(self, ctx2=None, match=None, **opts):
263 """Returns a diff generator for the given contexts and matcher"""
263 """Returns a diff generator for the given contexts and matcher"""
264 if ctx2 is None:
264 if ctx2 is None:
265 ctx2 = self.p1()
265 ctx2 = self.p1()
266 if ctx2 is not None:
266 if ctx2 is not None:
267 ctx2 = self._repo[ctx2]
267 ctx2 = self._repo[ctx2]
268 diffopts = patch.diffopts(self._repo.ui, opts)
268 diffopts = patch.diffopts(self._repo.ui, opts)
269 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
269 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
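# Hedged usage sketch (editorial): emit the diff of a changeset against its
# first parent; 'repo' is assumed to come from a running Mercurial process.
ctx = repo['.']
for chunk in ctx.diff(git=True):
    repo.ui.write(chunk)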
270
270
271 def dirs(self):
271 def dirs(self):
272 return self._manifest.dirs()
272 return self._manifest.dirs()
273
273
274 def hasdir(self, dir):
274 def hasdir(self, dir):
275 return self._manifest.hasdir(dir)
275 return self._manifest.hasdir(dir)
276
276
277 def dirty(self, missing=False, merge=True, branch=True):
277 def dirty(self, missing=False, merge=True, branch=True):
278 return False
278 return False
279
279
280 def status(self, other=None, match=None, listignored=False,
280 def status(self, other=None, match=None, listignored=False,
281 listclean=False, listunknown=False, listsubrepos=False):
281 listclean=False, listunknown=False, listsubrepos=False):
282 """return status of files between two nodes or node and working
282 """return status of files between two nodes or node and working
283 directory.
283 directory.
284
284
285 If other is None, compare this node with working directory.
285 If other is None, compare this node with working directory.
286
286
287 returns (modified, added, removed, deleted, unknown, ignored, clean)
287 returns (modified, added, removed, deleted, unknown, ignored, clean)
288 """
288 """
289
289
290 ctx1 = self
290 ctx1 = self
291 ctx2 = self._repo[other]
291 ctx2 = self._repo[other]
292
292
293 # This next code block is, admittedly, fragile logic that tests for
293 # This next code block is, admittedly, fragile logic that tests for
294 # reversing the contexts and wouldn't need to exist if it weren't for
294 # reversing the contexts and wouldn't need to exist if it weren't for
295 # the fast (and common) code path of comparing the working directory
295 # the fast (and common) code path of comparing the working directory
296 # with its first parent.
296 # with its first parent.
297 #
297 #
298 # What we're aiming for here is the ability to call:
298 # What we're aiming for here is the ability to call:
299 #
299 #
300 # workingctx.status(parentctx)
300 # workingctx.status(parentctx)
301 #
301 #
302 # If we always built the manifest for each context and compared those,
302 # If we always built the manifest for each context and compared those,
303 # then we'd be done. But the special case of the above call means we
303 # then we'd be done. But the special case of the above call means we
304 # just copy the manifest of the parent.
304 # just copy the manifest of the parent.
305 reversed = False
305 reversed = False
306 if (not isinstance(ctx1, changectx)
306 if (not isinstance(ctx1, changectx)
307 and isinstance(ctx2, changectx)):
307 and isinstance(ctx2, changectx)):
308 reversed = True
308 reversed = True
309 ctx1, ctx2 = ctx2, ctx1
309 ctx1, ctx2 = ctx2, ctx1
310
310
311 match = ctx2._matchstatus(ctx1, match)
311 match = ctx2._matchstatus(ctx1, match)
312 r = scmutil.status([], [], [], [], [], [], [])
312 r = scmutil.status([], [], [], [], [], [], [])
313 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
313 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
314 listunknown)
314 listunknown)
315
315
316 if reversed:
316 if reversed:
317 # Reverse added and removed. Clear deleted, unknown and ignored as
317 # Reverse added and removed. Clear deleted, unknown and ignored as
318 # these make no sense to reverse.
318 # these make no sense to reverse.
319 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
319 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
320 r.clean)
320 r.clean)
321
321
322 if listsubrepos:
322 if listsubrepos:
323 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
323 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
324 rev2 = ctx2.subrev(subpath)
324 rev2 = ctx2.subrev(subpath)
325 try:
325 try:
326 submatch = matchmod.narrowmatcher(subpath, match)
326 submatch = matchmod.narrowmatcher(subpath, match)
327 s = sub.status(rev2, match=submatch, ignored=listignored,
327 s = sub.status(rev2, match=submatch, ignored=listignored,
328 clean=listclean, unknown=listunknown,
328 clean=listclean, unknown=listunknown,
329 listsubrepos=True)
329 listsubrepos=True)
330 for rfiles, sfiles in zip(r, s):
330 for rfiles, sfiles in zip(r, s):
331 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
331 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
332 except error.LookupError:
332 except error.LookupError:
333 self._repo.ui.status(_("skipping missing "
333 self._repo.ui.status(_("skipping missing "
334 "subrepository: %s\n") % subpath)
334 "subrepository: %s\n") % subpath)
335
335
336 for l in r:
336 for l in r:
337 l.sort()
337 l.sort()
338
338
339 return r
339 return r
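# Hedged usage sketch (editorial): the fast path described in the comment
# above, comparing the working directory with its first parent
# ('repo' assumed to exist).
wctx = repo[None]
st = wctx.status(repo['.'], listclean=True)
for f in st.modified:
    repo.ui.write('M %s\n' % f)
for f in st.clean:
    repo.ui.write('C %s\n' % f)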
340
340
341
341
@@ -342,15 +342,16 @@
 def makememctx(repo, parents, text, user, date, branch, files, store,
-               editor=None):
+               editor=None, extra=None):
     def getfilectx(repo, memctx, path):
         data, mode, copied = store.getfile(path)
         if data is None:
             return None
         islink, isexec = mode
         return memfilectx(repo, path, data, islink=islink, isexec=isexec,
                           copied=copied, memctx=memctx)
-    extra = {}
+    if extra is None:
+        extra = {}
     if branch:
         extra['branch'] = encoding.fromlocal(branch)
     ctx = memctx(repo, parents, text, files, getfilectx, user,
                  date, extra, editor)
     return ctx
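# Hedged sketch (editorial) of how a caller might use the new 'extra'
# argument; 'repo' and a populated 'store' object exposing getfile(path)
# (for example a patch.filestore) are assumed to exist, and the file list,
# user, message and extra field are purely illustrative.
p1 = repo['.'].node()
extra = {'convert_revision': 'abc123'}  # hypothetical extra field
ctx = makememctx(repo, (p1, nullid), "import a patch", "user@example.com",
                 None, None, ['a.txt'], store, extra=extra)
newnode = repo.commitctx(ctx)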
357
358
358 class changectx(basectx):
359 class changectx(basectx):
359 """A changecontext object makes access to data related to a particular
360 """A changecontext object makes access to data related to a particular
360 changeset convenient. It represents a read-only context already present in
361 changeset convenient. It represents a read-only context already present in
361 the repo."""
362 the repo."""
362 def __init__(self, repo, changeid=''):
363 def __init__(self, repo, changeid=''):
363 """changeid is a revision number, node, or tag"""
364 """changeid is a revision number, node, or tag"""
364
365
365 # since basectx.__new__ already took care of copying the object, we
366 # since basectx.__new__ already took care of copying the object, we
366 # don't need to do anything in __init__, so we just exit here
367 # don't need to do anything in __init__, so we just exit here
367 if isinstance(changeid, basectx):
368 if isinstance(changeid, basectx):
368 return
369 return
369
370
370 if changeid == '':
371 if changeid == '':
371 changeid = '.'
372 changeid = '.'
372 self._repo = repo
373 self._repo = repo
373
374
374 try:
375 try:
375 if isinstance(changeid, int):
376 if isinstance(changeid, int):
376 self._node = repo.changelog.node(changeid)
377 self._node = repo.changelog.node(changeid)
377 self._rev = changeid
378 self._rev = changeid
378 return
379 return
379 if isinstance(changeid, long):
380 if isinstance(changeid, long):
380 changeid = str(changeid)
381 changeid = str(changeid)
381 if changeid == 'null':
382 if changeid == 'null':
382 self._node = nullid
383 self._node = nullid
383 self._rev = nullrev
384 self._rev = nullrev
384 return
385 return
385 if changeid == 'tip':
386 if changeid == 'tip':
386 self._node = repo.changelog.tip()
387 self._node = repo.changelog.tip()
387 self._rev = repo.changelog.rev(self._node)
388 self._rev = repo.changelog.rev(self._node)
388 return
389 return
389 if changeid == '.' or changeid == repo.dirstate.p1():
390 if changeid == '.' or changeid == repo.dirstate.p1():
390 # this is a hack to delay/avoid loading obsmarkers
391 # this is a hack to delay/avoid loading obsmarkers
391 # when we know that '.' won't be hidden
392 # when we know that '.' won't be hidden
392 self._node = repo.dirstate.p1()
393 self._node = repo.dirstate.p1()
393 self._rev = repo.unfiltered().changelog.rev(self._node)
394 self._rev = repo.unfiltered().changelog.rev(self._node)
394 return
395 return
395 if len(changeid) == 20:
396 if len(changeid) == 20:
396 try:
397 try:
397 self._node = changeid
398 self._node = changeid
398 self._rev = repo.changelog.rev(changeid)
399 self._rev = repo.changelog.rev(changeid)
399 return
400 return
400 except error.FilteredRepoLookupError:
401 except error.FilteredRepoLookupError:
401 raise
402 raise
402 except LookupError:
403 except LookupError:
403 pass
404 pass
404
405
405 try:
406 try:
406 r = int(changeid)
407 r = int(changeid)
407 if str(r) != changeid:
408 if str(r) != changeid:
408 raise ValueError
409 raise ValueError
409 l = len(repo.changelog)
410 l = len(repo.changelog)
410 if r < 0:
411 if r < 0:
411 r += l
412 r += l
412 if r < 0 or r >= l:
413 if r < 0 or r >= l:
413 raise ValueError
414 raise ValueError
414 self._rev = r
415 self._rev = r
415 self._node = repo.changelog.node(r)
416 self._node = repo.changelog.node(r)
416 return
417 return
417 except error.FilteredIndexError:
418 except error.FilteredIndexError:
418 raise
419 raise
419 except (ValueError, OverflowError, IndexError):
420 except (ValueError, OverflowError, IndexError):
420 pass
421 pass
421
422
422 if len(changeid) == 40:
423 if len(changeid) == 40:
423 try:
424 try:
424 self._node = bin(changeid)
425 self._node = bin(changeid)
425 self._rev = repo.changelog.rev(self._node)
426 self._rev = repo.changelog.rev(self._node)
426 return
427 return
427 except error.FilteredLookupError:
428 except error.FilteredLookupError:
428 raise
429 raise
429 except (TypeError, LookupError):
430 except (TypeError, LookupError):
430 pass
431 pass
431
432
432 # lookup bookmarks through the name interface
433 # lookup bookmarks through the name interface
433 try:
434 try:
434 self._node = repo.names.singlenode(repo, changeid)
435 self._node = repo.names.singlenode(repo, changeid)
435 self._rev = repo.changelog.rev(self._node)
436 self._rev = repo.changelog.rev(self._node)
436 return
437 return
437 except KeyError:
438 except KeyError:
438 pass
439 pass
439 except error.FilteredRepoLookupError:
440 except error.FilteredRepoLookupError:
440 raise
441 raise
441 except error.RepoLookupError:
442 except error.RepoLookupError:
442 pass
443 pass
443
444
444 self._node = repo.unfiltered().changelog._partialmatch(changeid)
445 self._node = repo.unfiltered().changelog._partialmatch(changeid)
445 if self._node is not None:
446 if self._node is not None:
446 self._rev = repo.changelog.rev(self._node)
447 self._rev = repo.changelog.rev(self._node)
447 return
448 return
448
449
449 # lookup failed
450 # lookup failed
450 # check if it might have come from damaged dirstate
451 # check if it might have come from damaged dirstate
451 #
452 #
452 # XXX we could avoid the unfiltered if we had a recognizable
453 # XXX we could avoid the unfiltered if we had a recognizable
453 # exception for filtered changeset access
454 # exception for filtered changeset access
454 if changeid in repo.unfiltered().dirstate.parents():
455 if changeid in repo.unfiltered().dirstate.parents():
455 msg = _("working directory has unknown parent '%s'!")
456 msg = _("working directory has unknown parent '%s'!")
456 raise error.Abort(msg % short(changeid))
457 raise error.Abort(msg % short(changeid))
457 try:
458 try:
458 if len(changeid) == 20:
459 if len(changeid) == 20:
459 changeid = hex(changeid)
460 changeid = hex(changeid)
460 except TypeError:
461 except TypeError:
461 pass
462 pass
462 except (error.FilteredIndexError, error.FilteredLookupError,
463 except (error.FilteredIndexError, error.FilteredLookupError,
463 error.FilteredRepoLookupError):
464 error.FilteredRepoLookupError):
464 if repo.filtername.startswith('visible'):
465 if repo.filtername.startswith('visible'):
465 msg = _("hidden revision '%s'") % changeid
466 msg = _("hidden revision '%s'") % changeid
466 hint = _('use --hidden to access hidden revisions')
467 hint = _('use --hidden to access hidden revisions')
467 raise error.FilteredRepoLookupError(msg, hint=hint)
468 raise error.FilteredRepoLookupError(msg, hint=hint)
468 msg = _("filtered revision '%s' (not in '%s' subset)")
469 msg = _("filtered revision '%s' (not in '%s' subset)")
469 msg %= (changeid, repo.filtername)
470 msg %= (changeid, repo.filtername)
470 raise error.FilteredRepoLookupError(msg)
471 raise error.FilteredRepoLookupError(msg)
471 except IndexError:
472 except IndexError:
472 pass
473 pass
473 raise error.RepoLookupError(
474 raise error.RepoLookupError(
474 _("unknown revision '%s'") % changeid)
475 _("unknown revision '%s'") % changeid)
475
476
476 def __hash__(self):
477 def __hash__(self):
477 try:
478 try:
478 return hash(self._rev)
479 return hash(self._rev)
479 except AttributeError:
480 except AttributeError:
480 return id(self)
481 return id(self)
481
482
482 def __nonzero__(self):
483 def __nonzero__(self):
483 return self._rev != nullrev
484 return self._rev != nullrev
484
485
485 @propertycache
486 @propertycache
486 def _changeset(self):
487 def _changeset(self):
487 return self._repo.changelog.read(self.rev())
488 return self._repo.changelog.read(self.rev())
488
489
489 @propertycache
490 @propertycache
490 def _manifest(self):
491 def _manifest(self):
491 return self._repo.manifest.read(self._changeset[0])
492 return self._repo.manifest.read(self._changeset[0])
492
493
493 @propertycache
494 @propertycache
494 def _manifestdelta(self):
495 def _manifestdelta(self):
495 return self._repo.manifest.readdelta(self._changeset[0])
496 return self._repo.manifest.readdelta(self._changeset[0])
496
497
497 @propertycache
498 @propertycache
498 def _parents(self):
499 def _parents(self):
499 p = self._repo.changelog.parentrevs(self._rev)
500 p = self._repo.changelog.parentrevs(self._rev)
500 if p[1] == nullrev:
501 if p[1] == nullrev:
501 p = p[:-1]
502 p = p[:-1]
502 return [changectx(self._repo, x) for x in p]
503 return [changectx(self._repo, x) for x in p]
503
504
504 def changeset(self):
505 def changeset(self):
505 return self._changeset
506 return self._changeset
506 def manifestnode(self):
507 def manifestnode(self):
507 return self._changeset[0]
508 return self._changeset[0]
508
509
509 def user(self):
510 def user(self):
510 return self._changeset[1]
511 return self._changeset[1]
511 def date(self):
512 def date(self):
512 return self._changeset[2]
513 return self._changeset[2]
513 def files(self):
514 def files(self):
514 return self._changeset[3]
515 return self._changeset[3]
515 def description(self):
516 def description(self):
516 return self._changeset[4]
517 return self._changeset[4]
517 def branch(self):
518 def branch(self):
518 return encoding.tolocal(self._changeset[5].get("branch"))
519 return encoding.tolocal(self._changeset[5].get("branch"))
519 def closesbranch(self):
520 def closesbranch(self):
520 return 'close' in self._changeset[5]
521 return 'close' in self._changeset[5]
521 def extra(self):
522 def extra(self):
522 return self._changeset[5]
523 return self._changeset[5]
523 def tags(self):
524 def tags(self):
524 return self._repo.nodetags(self._node)
525 return self._repo.nodetags(self._node)
525 def bookmarks(self):
526 def bookmarks(self):
526 return self._repo.nodebookmarks(self._node)
527 return self._repo.nodebookmarks(self._node)
527 def phase(self):
528 def phase(self):
528 return self._repo._phasecache.phase(self._repo, self._rev)
529 return self._repo._phasecache.phase(self._repo, self._rev)
529 def hidden(self):
530 def hidden(self):
530 return self._rev in repoview.filterrevs(self._repo, 'visible')
531 return self._rev in repoview.filterrevs(self._repo, 'visible')
531
532
532 def children(self):
533 def children(self):
533 """return contexts for each child changeset"""
534 """return contexts for each child changeset"""
534 c = self._repo.changelog.children(self._node)
535 c = self._repo.changelog.children(self._node)
535 return [changectx(self._repo, x) for x in c]
536 return [changectx(self._repo, x) for x in c]
536
537
537 def ancestors(self):
538 def ancestors(self):
538 for a in self._repo.changelog.ancestors([self._rev]):
539 for a in self._repo.changelog.ancestors([self._rev]):
539 yield changectx(self._repo, a)
540 yield changectx(self._repo, a)
540
541
541 def descendants(self):
542 def descendants(self):
542 for d in self._repo.changelog.descendants([self._rev]):
543 for d in self._repo.changelog.descendants([self._rev]):
543 yield changectx(self._repo, d)
544 yield changectx(self._repo, d)
544
545
545 def filectx(self, path, fileid=None, filelog=None):
546 def filectx(self, path, fileid=None, filelog=None):
546 """get a file context from this changeset"""
547 """get a file context from this changeset"""
547 if fileid is None:
548 if fileid is None:
548 fileid = self.filenode(path)
549 fileid = self.filenode(path)
549 return filectx(self._repo, path, fileid=fileid,
550 return filectx(self._repo, path, fileid=fileid,
550 changectx=self, filelog=filelog)
551 changectx=self, filelog=filelog)
551
552
552 def ancestor(self, c2, warn=False):
553 def ancestor(self, c2, warn=False):
553 """return the "best" ancestor context of self and c2
554 """return the "best" ancestor context of self and c2
554
555
555 If there are multiple candidates, it will show a message and check
556 If there are multiple candidates, it will show a message and check
556 merge.preferancestor configuration before falling back to the
557 merge.preferancestor configuration before falling back to the
557 revlog ancestor."""
558 revlog ancestor."""
558 # deal with workingctxs
559 # deal with workingctxs
559 n2 = c2._node
560 n2 = c2._node
560 if n2 is None:
561 if n2 is None:
561 n2 = c2._parents[0]._node
562 n2 = c2._parents[0]._node
562 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
563 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
563 if not cahs:
564 if not cahs:
564 anc = nullid
565 anc = nullid
565 elif len(cahs) == 1:
566 elif len(cahs) == 1:
566 anc = cahs[0]
567 anc = cahs[0]
567 else:
568 else:
568 for r in self._repo.ui.configlist('merge', 'preferancestor'):
569 for r in self._repo.ui.configlist('merge', 'preferancestor'):
569 try:
570 try:
570 ctx = changectx(self._repo, r)
571 ctx = changectx(self._repo, r)
571 except error.RepoLookupError:
572 except error.RepoLookupError:
572 continue
573 continue
573 anc = ctx.node()
574 anc = ctx.node()
574 if anc in cahs:
575 if anc in cahs:
575 break
576 break
576 else:
577 else:
577 anc = self._repo.changelog.ancestor(self._node, n2)
578 anc = self._repo.changelog.ancestor(self._node, n2)
578 if warn:
579 if warn:
579 self._repo.ui.status(
580 self._repo.ui.status(
580 (_("note: using %s as ancestor of %s and %s\n") %
581 (_("note: using %s as ancestor of %s and %s\n") %
581 (short(anc), short(self._node), short(n2))) +
582 (short(anc), short(self._node), short(n2))) +
582 ''.join(_(" alternatively, use --config "
583 ''.join(_(" alternatively, use --config "
583 "merge.preferancestor=%s\n") %
584 "merge.preferancestor=%s\n") %
584 short(n) for n in sorted(cahs) if n != anc))
585 short(n) for n in sorted(cahs) if n != anc))
585 return changectx(self._repo, anc)
586 return changectx(self._repo, anc)
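# Hedged usage sketch (editorial): picking a merge base between two heads;
# 'repo' is assumed to exist and 'rev1'/'rev2' are hypothetical revisions.
base = repo[rev1].ancestor(repo[rev2], warn=True)
repo.ui.write('using %s as merge base\n' % base)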
586
587
587 def descendant(self, other):
588 def descendant(self, other):
588 """True if other is descendant of this changeset"""
589 """True if other is descendant of this changeset"""
589 return self._repo.changelog.descendant(self._rev, other._rev)
590 return self._repo.changelog.descendant(self._rev, other._rev)
590
591
591 def walk(self, match):
592 def walk(self, match):
592 '''Generates matching file names.'''
593 '''Generates matching file names.'''
593
594
594 # Override match.bad method to have message with nodeid
595 # Override match.bad method to have message with nodeid
595 match = copy.copy(match)
596 match = copy.copy(match)
596 oldbad = match.bad
597 oldbad = match.bad
597 def bad(fn, msg):
598 def bad(fn, msg):
598 # The manifest doesn't know about subrepos, so don't complain about
599 # The manifest doesn't know about subrepos, so don't complain about
599 # paths into valid subrepos.
600 # paths into valid subrepos.
600 if any(fn == s or fn.startswith(s + '/')
601 if any(fn == s or fn.startswith(s + '/')
601 for s in self.substate):
602 for s in self.substate):
602 return
603 return
603 oldbad(fn, _('no such file in rev %s') % self)
604 oldbad(fn, _('no such file in rev %s') % self)
604 match.bad = bad
605 match.bad = bad
605
606
606 return self._manifest.walk(match)
607 return self._manifest.walk(match)
607
608
608 def matches(self, match):
609 def matches(self, match):
609 return self.walk(match)
610 return self.walk(match)
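# Hedged usage sketch (editorial): listing the Python files of a revision
# through the matcher returned by basectx.match ('repo' assumed to exist).
ctx = repo['tip']
m = ctx.match(['glob:**.py'])
for f in ctx.walk(m):
    repo.ui.write('%s\n' % f)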
610
611
611 class basefilectx(object):
612 class basefilectx(object):
612 """A filecontext object represents the common logic for its children:
613 """A filecontext object represents the common logic for its children:
613 filectx: read-only access to a filerevision that is already present
614 filectx: read-only access to a filerevision that is already present
614 in the repo,
615 in the repo,
615 workingfilectx: a filecontext that represents files from the working
616 workingfilectx: a filecontext that represents files from the working
616 directory,
617 directory,
617 memfilectx: a filecontext that represents files in-memory."""
618 memfilectx: a filecontext that represents files in-memory."""
618 def __new__(cls, repo, path, *args, **kwargs):
619 def __new__(cls, repo, path, *args, **kwargs):
619 return super(basefilectx, cls).__new__(cls)
620 return super(basefilectx, cls).__new__(cls)
620
621
621 @propertycache
622 @propertycache
622 def _filelog(self):
623 def _filelog(self):
623 return self._repo.file(self._path)
624 return self._repo.file(self._path)
624
625
625 @propertycache
626 @propertycache
626 def _changeid(self):
627 def _changeid(self):
627 if '_changeid' in self.__dict__:
628 if '_changeid' in self.__dict__:
628 return self._changeid
629 return self._changeid
629 elif '_changectx' in self.__dict__:
630 elif '_changectx' in self.__dict__:
630 return self._changectx.rev()
631 return self._changectx.rev()
631 elif '_descendantrev' in self.__dict__:
632 elif '_descendantrev' in self.__dict__:
632 # this file context was created from a revision with a known
633 # this file context was created from a revision with a known
633 # descendant, we can (lazily) correct for linkrev aliases
634 # descendant, we can (lazily) correct for linkrev aliases
634 return self._adjustlinkrev(self._path, self._filelog,
635 return self._adjustlinkrev(self._path, self._filelog,
635 self._filenode, self._descendantrev)
636 self._filenode, self._descendantrev)
636 else:
637 else:
637 return self._filelog.linkrev(self._filerev)
638 return self._filelog.linkrev(self._filerev)
638
639
639 @propertycache
640 @propertycache
640 def _filenode(self):
641 def _filenode(self):
641 if '_fileid' in self.__dict__:
642 if '_fileid' in self.__dict__:
642 return self._filelog.lookup(self._fileid)
643 return self._filelog.lookup(self._fileid)
643 else:
644 else:
644 return self._changectx.filenode(self._path)
645 return self._changectx.filenode(self._path)
645
646
646 @propertycache
647 @propertycache
647 def _filerev(self):
648 def _filerev(self):
648 return self._filelog.rev(self._filenode)
649 return self._filelog.rev(self._filenode)
649
650
650 @propertycache
651 @propertycache
651 def _repopath(self):
652 def _repopath(self):
652 return self._path
653 return self._path
653
654
654 def __nonzero__(self):
655 def __nonzero__(self):
655 try:
656 try:
656 self._filenode
657 self._filenode
657 return True
658 return True
658 except error.LookupError:
659 except error.LookupError:
659 # file is missing
660 # file is missing
660 return False
661 return False
661
662
662 def __str__(self):
663 def __str__(self):
663 return "%s@%s" % (self.path(), self._changectx)
664 return "%s@%s" % (self.path(), self._changectx)
664
665
665 def __repr__(self):
666 def __repr__(self):
666 return "<%s %s>" % (type(self).__name__, str(self))
667 return "<%s %s>" % (type(self).__name__, str(self))
667
668
668 def __hash__(self):
669 def __hash__(self):
669 try:
670 try:
670 return hash((self._path, self._filenode))
671 return hash((self._path, self._filenode))
671 except AttributeError:
672 except AttributeError:
672 return id(self)
673 return id(self)
673
674
674 def __eq__(self, other):
675 def __eq__(self, other):
675 try:
676 try:
676 return (type(self) == type(other) and self._path == other._path
677 return (type(self) == type(other) and self._path == other._path
677 and self._filenode == other._filenode)
678 and self._filenode == other._filenode)
678 except AttributeError:
679 except AttributeError:
679 return False
680 return False
680
681
681 def __ne__(self, other):
682 def __ne__(self, other):
682 return not (self == other)
683 return not (self == other)
683
684
684 def filerev(self):
685 def filerev(self):
685 return self._filerev
686 return self._filerev
686 def filenode(self):
687 def filenode(self):
687 return self._filenode
688 return self._filenode
688 def flags(self):
689 def flags(self):
689 return self._changectx.flags(self._path)
690 return self._changectx.flags(self._path)
690 def filelog(self):
691 def filelog(self):
691 return self._filelog
692 return self._filelog
692 def rev(self):
693 def rev(self):
693 return self._changeid
694 return self._changeid
694 def linkrev(self):
695 def linkrev(self):
695 return self._filelog.linkrev(self._filerev)
696 return self._filelog.linkrev(self._filerev)
696 def node(self):
697 def node(self):
697 return self._changectx.node()
698 return self._changectx.node()
698 def hex(self):
699 def hex(self):
699 return self._changectx.hex()
700 return self._changectx.hex()
700 def user(self):
701 def user(self):
701 return self._changectx.user()
702 return self._changectx.user()
702 def date(self):
703 def date(self):
703 return self._changectx.date()
704 return self._changectx.date()
704 def files(self):
705 def files(self):
705 return self._changectx.files()
706 return self._changectx.files()
706 def description(self):
707 def description(self):
707 return self._changectx.description()
708 return self._changectx.description()
708 def branch(self):
709 def branch(self):
709 return self._changectx.branch()
710 return self._changectx.branch()
710 def extra(self):
711 def extra(self):
711 return self._changectx.extra()
712 return self._changectx.extra()
712 def phase(self):
713 def phase(self):
713 return self._changectx.phase()
714 return self._changectx.phase()
714 def phasestr(self):
715 def phasestr(self):
715 return self._changectx.phasestr()
716 return self._changectx.phasestr()
716 def manifest(self):
717 def manifest(self):
717 return self._changectx.manifest()
718 return self._changectx.manifest()
718 def changectx(self):
719 def changectx(self):
719 return self._changectx
720 return self._changectx
720 def repo(self):
721 def repo(self):
721 return self._repo
722 return self._repo
722
723
723 def path(self):
724 def path(self):
724 return self._path
725 return self._path
725
726
726 def isbinary(self):
727 def isbinary(self):
727 try:
728 try:
728 return util.binary(self.data())
729 return util.binary(self.data())
729 except IOError:
730 except IOError:
730 return False
731 return False
731 def isexec(self):
732 def isexec(self):
732 return 'x' in self.flags()
733 return 'x' in self.flags()
733 def islink(self):
734 def islink(self):
734 return 'l' in self.flags()
735 return 'l' in self.flags()
735
736
736 def cmp(self, fctx):
737 def cmp(self, fctx):
737 """compare with other file context
738 """compare with other file context
738
739
739 returns True if different than fctx.
740 returns True if different than fctx.
740 """
741 """
741 if (fctx._filerev is None
742 if (fctx._filerev is None
742 and (self._repo._encodefilterpats
743 and (self._repo._encodefilterpats
743 # if file data starts with '\1\n', empty metadata block is
744 # if file data starts with '\1\n', empty metadata block is
744 # prepended, which adds 4 bytes to filelog.size().
745 # prepended, which adds 4 bytes to filelog.size().
745 or self.size() - 4 == fctx.size())
746 or self.size() - 4 == fctx.size())
746 or self.size() == fctx.size()):
747 or self.size() == fctx.size()):
747 return self._filelog.cmp(self._filenode, fctx.data())
748 return self._filelog.cmp(self._filenode, fctx.data())
748
749
749 return True
750 return True
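# Hedged usage sketch (editorial): cmp() is how status detection decides
# whether a working-directory file really differs from its parent revision;
# 'a.txt' is a hypothetical tracked file.
wfctx = repo[None]['a.txt']
pfctx = repo['.']['a.txt']
if wfctx.cmp(pfctx):
    repo.ui.write('a.txt differs from the parent revision\n')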
750
751
751 def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
752 def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
752 """return the first ancestor of <srcrev> introducing <fnode>
753 """return the first ancestor of <srcrev> introducing <fnode>
753
754
754 If the linkrev of the file revision does not point to an ancestor of
755 If the linkrev of the file revision does not point to an ancestor of
755 srcrev, we'll walk down the ancestors until we find one introducing
756 srcrev, we'll walk down the ancestors until we find one introducing
756 this file revision.
757 this file revision.
757
758
758 :repo: a localrepository object (used to access changelog and manifest)
759 :repo: a localrepository object (used to access changelog and manifest)
759 :path: the file path
760 :path: the file path
760 :fnode: the nodeid of the file revision
761 :fnode: the nodeid of the file revision
761 :filelog: the filelog of this path
762 :filelog: the filelog of this path
762 :srcrev: the changeset revision we search ancestors from
763 :srcrev: the changeset revision we search ancestors from
763 :inclusive: if true, the src revision will also be checked
764 :inclusive: if true, the src revision will also be checked
764 """
765 """
765 repo = self._repo
766 repo = self._repo
766 cl = repo.unfiltered().changelog
767 cl = repo.unfiltered().changelog
767 ma = repo.manifest
768 ma = repo.manifest
768 # fetch the linkrev
769 # fetch the linkrev
769 fr = filelog.rev(fnode)
770 fr = filelog.rev(fnode)
770 lkr = filelog.linkrev(fr)
771 lkr = filelog.linkrev(fr)
771 # hack to reuse ancestor computation when searching for renames
772 # hack to reuse ancestor computation when searching for renames
772 memberanc = getattr(self, '_ancestrycontext', None)
773 memberanc = getattr(self, '_ancestrycontext', None)
773 iteranc = None
774 iteranc = None
774 if srcrev is None:
775 if srcrev is None:
775 # wctx case, used by workingfilectx during mergecopy
776 # wctx case, used by workingfilectx during mergecopy
776 revs = [p.rev() for p in self._repo[None].parents()]
777 revs = [p.rev() for p in self._repo[None].parents()]
777 inclusive = True # we skipped the real (revless) source
778 inclusive = True # we skipped the real (revless) source
778 else:
779 else:
779 revs = [srcrev]
780 revs = [srcrev]
780 if memberanc is None:
781 if memberanc is None:
781 memberanc = iteranc = cl.ancestors(revs, lkr,
782 memberanc = iteranc = cl.ancestors(revs, lkr,
782 inclusive=inclusive)
783 inclusive=inclusive)
783 # check if this linkrev is an ancestor of srcrev
784 # check if this linkrev is an ancestor of srcrev
784 if lkr not in memberanc:
785 if lkr not in memberanc:
785 if iteranc is None:
786 if iteranc is None:
786 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
787 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
787 for a in iteranc:
788 for a in iteranc:
788 ac = cl.read(a) # get changeset data (we avoid object creation)
789 ac = cl.read(a) # get changeset data (we avoid object creation)
789 if path in ac[3]: # checking the 'files' field.
790 if path in ac[3]: # checking the 'files' field.
790 # The file has been touched, check if the content is
791 # The file has been touched, check if the content is
791 # similar to the one we search for.
792 # similar to the one we search for.
792 if fnode == ma.readfast(ac[0]).get(path):
793 if fnode == ma.readfast(ac[0]).get(path):
793 return a
794 return a
794 # In theory, we should never get out of that loop without a result.
795 # In theory, we should never get out of that loop without a result.
795 # But if manifest uses a buggy file revision (not children of the
796 # But if manifest uses a buggy file revision (not children of the
796 # one it replaces) we could. Such a buggy situation will likely
797 # one it replaces) we could. Such a buggy situation will likely
797 # result in a crash somewhere else at some point.
798 # result in a crash somewhere else at some point.
798 return lkr
799 return lkr
799
800
800 def introrev(self):
801 def introrev(self):
801 """return the rev of the changeset which introduced this file revision
802 """return the rev of the changeset which introduced this file revision
802
803
803 This method is different from linkrev because it takes into account the
804 This method is different from linkrev because it takes into account the
804 changeset the filectx was created from. It ensures the returned
805 changeset the filectx was created from. It ensures the returned
805 revision is one of its ancestors. This prevents bugs from
806 revision is one of its ancestors. This prevents bugs from
806 'linkrev-shadowing' when a file revision is used by multiple
807 'linkrev-shadowing' when a file revision is used by multiple
807 changesets.
808 changesets.
808 """
809 """
809 lkr = self.linkrev()
810 lkr = self.linkrev()
810 attrs = vars(self)
811 attrs = vars(self)
811 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
812 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
812 if noctx or self.rev() == lkr:
813 if noctx or self.rev() == lkr:
813 return self.linkrev()
814 return self.linkrev()
814 return self._adjustlinkrev(self._path, self._filelog, self._filenode,
815 return self._adjustlinkrev(self._path, self._filelog, self._filenode,
815 self.rev(), inclusive=True)
816 self.rev(), inclusive=True)
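# Hedged illustration (editorial): for a filectx reached from a specific
# changeset, introrev() may differ from the raw linkrev() when that file
# revision is reused by several changesets ('a.txt' is hypothetical).
fctx = repo['tip']['a.txt']
raw = fctx.linkrev()      # changelog rev recorded in the filelog
intro = fctx.introrev()   # rev in tip's ancestry that introduced this content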
816
817
817 def _parentfilectx(self, path, fileid, filelog):
818 def _parentfilectx(self, path, fileid, filelog):
818 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
819 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
819 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
820 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
820 if '_changeid' in vars(self) or '_changectx' in vars(self):
821 if '_changeid' in vars(self) or '_changectx' in vars(self):
821 # If self is associated with a changeset (probably explicitly
822 # If self is associated with a changeset (probably explicitly
822 # fed), ensure the created filectx is associated with a
823 # fed), ensure the created filectx is associated with a
823 # changeset that is an ancestor of self.changectx.
824 # changeset that is an ancestor of self.changectx.
824 # This lets us later use _adjustlinkrev to get a correct link.
825 # This lets us later use _adjustlinkrev to get a correct link.
825 fctx._descendantrev = self.rev()
826 fctx._descendantrev = self.rev()
826 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
827 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
827 elif '_descendantrev' in vars(self):
828 elif '_descendantrev' in vars(self):
828 # Otherwise propagate _descendantrev if we have one associated.
829 # Otherwise propagate _descendantrev if we have one associated.
829 fctx._descendantrev = self._descendantrev
830 fctx._descendantrev = self._descendantrev
830 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
831 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
831 return fctx
832 return fctx
832
833
833 def parents(self):
834 def parents(self):
834 _path = self._path
835 _path = self._path
835 fl = self._filelog
836 fl = self._filelog
836 parents = self._filelog.parents(self._filenode)
837 parents = self._filelog.parents(self._filenode)
837 pl = [(_path, node, fl) for node in parents if node != nullid]
838 pl = [(_path, node, fl) for node in parents if node != nullid]
838
839
839 r = fl.renamed(self._filenode)
840 r = fl.renamed(self._filenode)
840 if r:
841 if r:
841 # - In the simple rename case, both parents are nullid, pl is empty.
842 # - In the simple rename case, both parents are nullid, pl is empty.
842 # - In case of merge, only one of the parents is nullid and should
843 # - In case of merge, only one of the parents is nullid and should
843 # be replaced with the rename information. This parent is -always-
844 # be replaced with the rename information. This parent is -always-
844 # the first one.
845 # the first one.
845 #
846 #
846 # As nullid parents have always been filtered out in the previous list
847 # As nullid parents have always been filtered out in the previous list
847 # comprehension, inserting at index 0 always results in replacing the
848 # comprehension, inserting at index 0 always results in replacing the
848 # first nullid parent with the rename information.
849 # first nullid parent with the rename information.
849 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
850 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
850
851
851 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
852 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
852
853
853 def p1(self):
854 def p1(self):
854 return self.parents()[0]
855 return self.parents()[0]
855
856
856 def p2(self):
857 def p2(self):
857 p = self.parents()
858 p = self.parents()
858 if len(p) == 2:
859 if len(p) == 2:
859 return p[1]
860 return p[1]
860 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
861 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
861
862
862 def annotate(self, follow=False, linenumber=None, diffopts=None):
863 def annotate(self, follow=False, linenumber=None, diffopts=None):
863 '''returns a list of tuples of (ctx, line) for each line
864 '''returns a list of tuples of (ctx, line) for each line
864 in the file, where ctx is the filectx of the node where
865 in the file, where ctx is the filectx of the node where
865 that line was last changed.
866 that line was last changed.
866 This returns tuples of ((ctx, linenumber), line) for each line,
867 This returns tuples of ((ctx, linenumber), line) for each line,
867 if "linenumber" parameter is NOT "None".
868 if "linenumber" parameter is NOT "None".
868 In such tuples, linenumber is the line's number at its first appearance
869 In such tuples, linenumber is the line's number at its first appearance
869 in the managed file.
870 in the managed file.
870 To reduce annotation cost,
871 To reduce annotation cost,
871 a fixed value (False) is returned as linenumber
872 a fixed value (False) is returned as linenumber
872 if the "linenumber" parameter is "False".'''
873 if the "linenumber" parameter is "False".'''
873
874
874 if linenumber is None:
875 if linenumber is None:
875 def decorate(text, rev):
876 def decorate(text, rev):
876 return ([rev] * len(text.splitlines()), text)
877 return ([rev] * len(text.splitlines()), text)
877 elif linenumber:
878 elif linenumber:
878 def decorate(text, rev):
879 def decorate(text, rev):
879 size = len(text.splitlines())
880 size = len(text.splitlines())
880 return ([(rev, i) for i in xrange(1, size + 1)], text)
881 return ([(rev, i) for i in xrange(1, size + 1)], text)
881 else:
882 else:
882 def decorate(text, rev):
883 def decorate(text, rev):
883 return ([(rev, False)] * len(text.splitlines()), text)
884 return ([(rev, False)] * len(text.splitlines()), text)
884
885
885 def pair(parent, child):
886 def pair(parent, child):
886 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
887 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
887 refine=True)
888 refine=True)
888 for (a1, a2, b1, b2), t in blocks:
889 for (a1, a2, b1, b2), t in blocks:
889 # Changed blocks ('!') or blocks made only of blank lines ('~')
890 # Changed blocks ('!') or blocks made only of blank lines ('~')
890 # belong to the child.
891 # belong to the child.
891 if t == '=':
892 if t == '=':
892 child[0][b1:b2] = parent[0][a1:a2]
893 child[0][b1:b2] = parent[0][a1:a2]
893 return child
894 return child
894
895
895 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
896 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
896
897
897 def parents(f):
898 def parents(f):
898 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
899 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
899 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
900 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
900 # from the topmost introrev (= srcrev) down to p.linkrev() if it
901 # from the topmost introrev (= srcrev) down to p.linkrev() if it
901 # isn't an ancestor of the srcrev.
902 # isn't an ancestor of the srcrev.
902 f._changeid
903 f._changeid
903 pl = f.parents()
904 pl = f.parents()
904
905
905 # Don't return renamed parents if we aren't following.
906 # Don't return renamed parents if we aren't following.
906 if not follow:
907 if not follow:
907 pl = [p for p in pl if p.path() == f.path()]
908 pl = [p for p in pl if p.path() == f.path()]
908
909
909 # renamed filectx won't have a filelog yet, so set it
910 # renamed filectx won't have a filelog yet, so set it
910 # from the cache to save time
911 # from the cache to save time
911 for p in pl:
912 for p in pl:
912 if not '_filelog' in p.__dict__:
913 if not '_filelog' in p.__dict__:
913 p._filelog = getlog(p.path())
914 p._filelog = getlog(p.path())
914
915
915 return pl
916 return pl
916
917
917 # use linkrev to find the first changeset where self appeared
918 # use linkrev to find the first changeset where self appeared
918 base = self
919 base = self
919 introrev = self.introrev()
920 introrev = self.introrev()
920 if self.rev() != introrev:
921 if self.rev() != introrev:
921 base = self.filectx(self.filenode(), changeid=introrev)
922 base = self.filectx(self.filenode(), changeid=introrev)
922 if getattr(base, '_ancestrycontext', None) is None:
923 if getattr(base, '_ancestrycontext', None) is None:
923 cl = self._repo.changelog
924 cl = self._repo.changelog
924 if introrev is None:
925 if introrev is None:
925 # wctx is not inclusive, but works because _ancestrycontext
926 # wctx is not inclusive, but works because _ancestrycontext
926 # is used to test filelog revisions
927 # is used to test filelog revisions
927 ac = cl.ancestors([p.rev() for p in base.parents()],
928 ac = cl.ancestors([p.rev() for p in base.parents()],
928 inclusive=True)
929 inclusive=True)
929 else:
930 else:
930 ac = cl.ancestors([introrev], inclusive=True)
931 ac = cl.ancestors([introrev], inclusive=True)
931 base._ancestrycontext = ac
932 base._ancestrycontext = ac
932
933
933 # This algorithm would prefer to be recursive, but Python is a
934 # This algorithm would prefer to be recursive, but Python is a
934 # bit recursion-hostile. Instead we do an iterative
935 # bit recursion-hostile. Instead we do an iterative
935 # depth-first search.
936 # depth-first search.
936
937
937 visit = [base]
938 visit = [base]
938 hist = {}
939 hist = {}
939 pcache = {}
940 pcache = {}
940 needed = {base: 1}
941 needed = {base: 1}
941 while visit:
942 while visit:
942 f = visit[-1]
943 f = visit[-1]
943 pcached = f in pcache
944 pcached = f in pcache
944 if not pcached:
945 if not pcached:
945 pcache[f] = parents(f)
946 pcache[f] = parents(f)
946
947
947 ready = True
948 ready = True
948 pl = pcache[f]
949 pl = pcache[f]
949 for p in pl:
950 for p in pl:
950 if p not in hist:
951 if p not in hist:
951 ready = False
952 ready = False
952 visit.append(p)
953 visit.append(p)
953 if not pcached:
954 if not pcached:
954 needed[p] = needed.get(p, 0) + 1
955 needed[p] = needed.get(p, 0) + 1
955 if ready:
956 if ready:
956 visit.pop()
957 visit.pop()
957 reusable = f in hist
958 reusable = f in hist
958 if reusable:
959 if reusable:
959 curr = hist[f]
960 curr = hist[f]
960 else:
961 else:
961 curr = decorate(f.data(), f)
962 curr = decorate(f.data(), f)
962 for p in pl:
963 for p in pl:
963 if not reusable:
964 if not reusable:
964 curr = pair(hist[p], curr)
965 curr = pair(hist[p], curr)
965 if needed[p] == 1:
966 if needed[p] == 1:
966 del hist[p]
967 del hist[p]
967 del needed[p]
968 del needed[p]
968 else:
969 else:
969 needed[p] -= 1
970 needed[p] -= 1
970
971
971 hist[f] = curr
972 hist[f] = curr
972 pcache[f] = []
973 pcache[f] = []
973
974
974 return zip(hist[base][0], hist[base][1].splitlines(True))
975 return zip(hist[base][0], hist[base][1].splitlines(True))
975
976
976 def ancestors(self, followfirst=False):
977 def ancestors(self, followfirst=False):
977 visit = {}
978 visit = {}
978 c = self
979 c = self
979 if followfirst:
980 if followfirst:
980 cut = 1
981 cut = 1
981 else:
982 else:
982 cut = None
983 cut = None
983
984
984 while True:
985 while True:
985 for parent in c.parents()[:cut]:
986 for parent in c.parents()[:cut]:
986 visit[(parent.linkrev(), parent.filenode())] = parent
987 visit[(parent.linkrev(), parent.filenode())] = parent
987 if not visit:
988 if not visit:
988 break
989 break
989 c = visit.pop(max(visit))
990 c = visit.pop(max(visit))
990 yield c
991 yield c
991
992
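# Editor's note: the function below is an illustrative sketch only (it is not
# part of this change or of Mercurial's API). It shows one way the annotate()
# method documented above is commonly consumed, assuming 'repo' is a localrepo
# object and 'path' names a file present in the working directory's parent.
def _annotateexample(repo, path):
    fctx = repo['.'][path]
    for (actx, lineno), line in fctx.annotate(linenumber=True):
        # actx is the filectx of the revision that last changed this line;
        # lineno is the line's number at its first appearance in the file.
        repo.ui.write("%d:%d: %s" % (actx.rev(), lineno, line))
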
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to
            # build a `changectx` for a filtered revision. In such a case we
            # fall back to creating a `changectx` on the unfiltered version
            # of the repository. This fallback should not be an issue because
            # `changectx` from `filectx` are not used in complex operations
            # that care about filtering.
            #
            # This fallback is a cheap and dirty fix that prevents several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either, and "incorrect
            # behavior" is seen as better than "crash".
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving the linkrev issues is on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def data(self):
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy", "abort") == "ignore":
                return ""
            raise util.Abort(_("censored node: %s") % short(self._filenode),
                             hint=_("set censor.policy to ignore errors"))

    def size(self):
        return self._filelog.size(self._filerev)

    def renamed(self):
        """check if the file was actually renamed in this changeset revision

        If a rename is logged in the file revision, we report the copy for
        the changeset only if the file revision's linkrev points back to the
        changeset in question or both changeset parents contain different
        file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]

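# Editor's note: illustrative sketch only, not part of this change. It shows
# how the filectx class above gives read-only access to one revision of a
# file. 'repo' is assumed to be a localrepo object; 'tip' and the file name
# are arbitrary examples.
def _filectxexample(repo):
    fctx = filectx(repo, 'README', changeid='tip')
    data = fctx.data()          # file contents at the tip revision
    copysource = fctx.renamed() # (source path, filenode) or None
    return len(data), copysource
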
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __str__(self):
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get

        if len(self._parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = self._parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = self._parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuses the file nodeids from the parent, but appends an extra
        letter when a file has changed: modified files get an extra 'm' while
        added files get an extra 'a'. This is used by manifest merge to see
        that files are different and by update logic to avoid deleting newly
        added files.
        """

        man1 = self._parents[0].manifest()
        man = man1.copy()
        if len(self._parents) > 1:
            man2 = self.p2().manifest()
            def getman(f):
                if f in man1:
                    return man1
                return man2
        else:
            getman = lambda f: man1

        copied = self._repo.dirstate.copies()
        ff = self._flagfunc
        for i, l in (("a", self._status.added), ("m", self._status.modified)):
            for f in l:
                orig = copied.get(f, f)
                man[f] = getman(orig).get(orig, nullid) + i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in self._status.deleted + self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        t = []
        for p in self.parents():
            t.extend(p.tags())
        return t

    def bookmarks(self):
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        self._repo.dirstate.beginparentchange()
        for f in self.modified() + self.added():
            self._repo.dirstate.normal(f)
        for f in self.removed():
            self._repo.dirstate.drop(f)
        self._repo.dirstate.setparents(node)
        self._repo.dirstate.endparentchange()

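# Editor's note: illustrative sketch only, not part of this change. The
# committablectx API above is normally reached through repo[None], which
# returns the workingctx defined below. 'repo' is assumed to be a localrepo.
def _committablectxexample(repo):
    wctx = repo[None]
    return {
        'branch': wctx.branch(),   # from extra['branch'], 'default' if unset
        'files': wctx.files(),     # sorted modified + added + removed
        'user': wctx.user(),       # falls back to ui.username()
        'parents': [p.rev() for p in wctx.parents()],
    }
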
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
               or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)

    def __iter__(self):
        d = self._repo.dirstate
        for f in d:
            if d[f] != 'r':
                yield f

    def __contains__(self, key):
        return self._repo.dirstate[key] not in "?r"

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty():
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))

    def add(self, list, prefix=""):
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        ui, ds = self._repo.ui, self._repo.dirstate
        try:
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                scmutil.checkportable(ui, join(f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % join(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, join(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % join(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    ui.warn(_("%s already tracked!\n") % join(f))
                elif ds[f] == 'r':
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected
        finally:
            wlock.release()

    def forget(self, files, prefix=""):
        join = lambda f: os.path.join(prefix, f)
        wlock = self._repo.wlock()
        try:
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % join(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    self._repo.dirstate.remove(f)
                else:
                    self._repo.dirstate.drop(f)
            return rejected
        finally:
            wlock.release()

    def undelete(self, list):
        pctxs = self.parents()
        wlock = self._repo.wlock()
        try:
            for f in list:
                if self._repo.dirstate[f] != 'r':
                    self._repo.ui.warn(_("%s not removed!\n") % f)
                else:
                    fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                    t = fctx.data()
                    self._repo.wwrite(f, t, fctx.flags())
                    self._repo.dirstate.normal(f)
        finally:
            wlock.release()

    def copy(self, source, dest):
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError, err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n") % dest)
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n") % dest)
        else:
            wlock = self._repo.wlock()
            try:
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)
            finally:
                wlock.release()

    def match(self, pats=[], include=None, exclude=None, default='glob',
              listsubrepos=False):
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        if not util.checkcase(r.root):
            return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
                                           exclude, default, r.auditor, self,
                                           listsubrepos=listsubrepos)
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos)

    def _filtersuspectsymlink(self, files):
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], []

        modified = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            if (f not in pctx or self.flags(f) != pctx.flags(f)
                or pctx[f].cmp(self[f])):
                modified.append(f)
            else:
                fixup.append(f)

        # update dirstate for files that are actually clean
        if fixup:
            try:
                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                wlock = self._repo.wlock(False)
                normal = self._repo.dirstate.normal
                try:
                    for f in fixup:
                        normal(f)
                finally:
                    wlock.release()
            except error.LockError:
                pass
        return modified, fixup

    def _manifestmatches(self, match, s):
        """Slow path for workingctx

        The fast path is when we compare the working directory to its parent
        which means this function is comparing with a non-parent; therefore we
        need to build a manifest and return what matches.
        """
        mf = self._repo['.']._manifestmatches(match, s)
        for f in s.modified + s.added:
            mf[f] = _newnode
            mf.setflag(f, self.flags(f))
        for f in s.removed:
            if f in mf:
                del mf[f]
        return mf

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        listignored, listclean, listunknown = ignored, clean, unknown
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
                                            listclean, listunknown)

        # check for any possibly clean files
        if cmp:
            modified2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)

            # update dirstate for files that are actually clean
            if fixup and listclean:
                s.clean.extend(fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo['.']:
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s

    def _matchstatus(self, other, match):
        """override the match method with a filter for directory patterns

        We use inheritance to customize the match.bad method only in cases of
        workingctx since it belongs only to the working directory when
        comparing against the parent changeset.

        If we aren't comparing against the working directory's parent, then we
        just use the default match object sent to us.
        """
        superself = super(workingctx, self)
        match = superself._matchstatus(other, match)
        if other != self._repo['.']:
            def bad(f, msg):
                # 'f' may be a directory pattern from 'match.files()',
                # so 'f not in ctx1' is not enough
                if f not in other and not other.hasdir(f):
                    self._repo.ui.warn('%s: %s\n' %
                                       (self._repo.dirstate.pathto(f), msg))
            match.bad = bad
        return match

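# Editor's note: illustrative sketch only, not part of this change. It shows
# how the workingctx methods above schedule working directory changes in the
# dirstate. 'repo' is assumed to be a localrepo, and both file names are
# assumed to already exist on disk in the working directory.
def _workingctxexample(repo):
    wctx = repo[None]
    rejected = wctx.add(['newfile.txt'])          # start tracking a file
    wctx.copy('newfile.txt', 'newfile-copy.txt')  # record a copy in the dirstate
    return rejected, wctx.dirty(missing=True)
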
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        return True

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        return []

class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)
    def renamed(self):
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        t, tz = self._changectx.date()
        try:
            return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
        except OSError, err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags)

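# Editor's note: illustrative sketch only, not part of this change. Per-file
# access to the working directory goes through workingfilectx, usually
# obtained by indexing the working context. 'repo' is assumed to be a
# localrepo and 'path' an existing tracked file.
def _workingfilectxexample(repo, path):
    wfctx = repo[None][path]
    data = wfctx.data()               # read through repo.wread (filters apply)
    wfctx.write(data, wfctx.flags())  # write back through repo.wwrite
    return wfctx.size(), wfctx.date()
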
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed

class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time, while related
    file data is made available through a callback mechanism. 'repo' is the
    current localrepo, 'parents' is a sequence of two parent revision
    identifiers (pass None for every missing parent), 'text' is the commit
    message and 'files' lists names of files touched by the revision
    (normalized and relative to repository root).

    filectxfn(repo, memctx, path) is a callable receiving the repository,
    the current memctx object and the normalized path of the requested file,
    relative to repository root. It is fired by the commit function for
    every file in 'files', but the call order is undefined. If the file is
    available in the revision being committed (updated or added), filectxfn
    returns a memfilectx object. If the file was removed, filectxfn raises
    an IOError. Moved files are represented by marking the source file
    removed and the new file added with copy information (see memfilectx).

    user receives the committer name and defaults to the current repository
    username, date is the commit date in any format supported by
    util.parsedate() and defaults to the current date, extra is a dictionary
    of metadata or is left empty.
    """

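    # A minimal usage sketch (editor's illustration only, not part of this
    # change), assuming 'repo' is a localrepo and 'data' holds the new file
    # contents as a string:
    #
    #   def filectxfn(repo, memctx, path):
    #       return memfilectx(repo, path, data)
    #
    #   ctx = memctx(repo, [repo['.'].node(), None], "add a file in memory",
    #                ["a.txt"], filectxfn, user="someone@example.com")
    #   newnode = ctx.commit()
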
1762 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1763 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1763 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1764 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1764 # this field to determine what to do in filectxfn.
1765 # this field to determine what to do in filectxfn.
1765 _returnnoneformissingfiles = True
1766 _returnnoneformissingfiles = True
1766
1767
    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if filectxfn is not callable (e.g. a mapping of paths to memfilectx
        # objects), wrap it in a function; see the sketch after this method
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # "util.cachefunc" reduces invocation of possibly expensive
            # "filectxfn" for performance (e.g. converting from another VCS)
            self._filectxfn = util.cachefunc(filectxfn)

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

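    # Illustrative sketch (assumption; 'repo', 'p1' and 'p2' are placeholder
    # names): because __init__ wraps non-callable values, filectxfn may also
    # be a mapping from path to memfilectx.
    #
    #   store = {'a.txt': memfilectx(repo, 'a.txt', 'new contents\n')}
    #   ctx = memctx(repo, (p1, p2), "update a.txt", store.keys(), store)
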
    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].node()
                if len(p) > 1:
                    p2node = p[1].node()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized as a list of length 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to the repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if the current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

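    # Illustrative sketch (assumption; 'repo' and 'memctx' are placeholder
    # names): representing a rename of 'old.txt' to 'new.txt'.  The memctx's
    # filectxfn reports 'old.txt' as removed and returns the new file with
    # copy information:
    #
    #   fctx = memfilectx(repo, 'new.txt', 'renamed contents\n',
    #                     copied='old.txt', memctx=memctx)
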
    def data(self):
        return self._data
    def size(self):
        return len(self.data())
    def flags(self):
        return self._flags
    def renamed(self):
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data