context: override workingctx.hex() to avoid a crash...
Matt Harbison
r25590:183965a0 default
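
The three added lines fall outside the portion of the hunk reproduced below. Judging from the description, the fix presumably amounts to a small override on the working-directory context along the following lines (an illustrative sketch only; the exact placement in the class hierarchy is assumed, not shown here):

    def hex(self):
        # A workingctx has no node until it is committed, so node() returns
        # None and hex(None) would raise a TypeError; fall back to the null
        # node instead of crashing.
        return hex(self._node or nullid)

Both hex and nullid are imported at the top of context.py, so the sketch relies only on names visible in the listing below.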
@@ -1,1912 +1,1915 @@
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import nullid, nullrev, short, hex, bin
from i18n import _
import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
import match as matchmod
import os, errno, stat
import obsolete as obsmod
import repoview
import fileset
import revlog

propertycache = util.propertycache

# Phony node value to stand-in for new files in some uses of
# manifests. Manifests support 21-byte hashes for nodes which are
# dirty in the working copy.
_newnode = '!' * 21

class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __str__(self):
        return short(self.node())

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _manifestmatches(self, match, s):
        """generate a new manifest filtered by the match argument

        This method is for internal use only and mainly exists to provide an
        object oriented way for other contexts to customize the manifest
        generation.
        """
        return self.manifest().matches(match)

    def _matchstatus(self, other, match):
        """return match.always if match is none

        This internal method provides a way for child objects to override the
        match operator.
        """
        return match or matchmod.always(self._repo.root, self._repo.getcwd())

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        if self.rev() is not None and self.rev() < other.rev():
            self.manifest()
        mf1 = other._manifestmatches(match, s)
        mf2 = self._manifestmatches(match, s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif node2 != _newnode:
                # The file was not a new file in mf2, so an entry
                # from diff is really a difference.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                # node2 was newnode, but the working file doesn't
                # match the one in mf1.
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1]
            ignored = [fn for fn in ignored if fn not in mf1]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

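    # Illustrative shape of the manifest.diff() result consumed by
    # _buildstatus above (this comment is not part of the original file):
    # each differing path maps to a pair of (node, flags) tuples, with None
    # standing in for a missing side, e.g.
    #   {'added.txt':   ((None, ''), (newnode, '')),
    #    'removed.txt': ((node1, ''), (None, '')),
    #    'changed.py':  ((node1, ''), (node2, 'x'))}
    # and, when clean=True is passed, unchanged matching files map to None.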
    @propertycache
    def substate(self):
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
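    # Note on hex() above (not in the original file): a workingctx has no node
    # yet, so node() returns None and hex(None) raises a TypeError; the
    # override added by this changeset (outside the hunk shown) avoids that.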
    def manifest(self):
        return self._manifest
    def repo(self):
        return self._repo
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
174 """True if the changeset is not obsolete but it's ancestor are"""
174 """True if the changeset is not obsolete but it's ancestor are"""
        return self.rev() in obsmod.getrevs(self._repo, 'unstable')

    def bumped(self):
178 """True if the changeset try to be a successor of a public changeset
178 """True if the changeset try to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'bumped')

    def divergent(self):
185 """Is a successors of a changeset with multiple possible successors set
185 """Is a successors of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'divergent')

    def troubled(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.unstable() or self.bumped() or self.divergent()

    def troubles(self):
196 """return the list of troubles affecting this changesets.
196 """return the list of troubles affecting this changesets.
197
197
198 Troubles are returned as strings. possible values are:
198 Troubles are returned as strings. possible values are:
        - unstable,
        - bumped,
        - divergent.
        """
        troubles = []
        if self.unstable():
            troubles.append('unstable')
        if self.bumped():
            troubles.append('bumped')
        if self.divergent():
            troubles.append('divergent')
        return troubles

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        if len(self._parents) == 2:
            return self._parents[1]
        return changectx(self._repo, -1)

    def _fileinfo(self, path):
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        node, flag = self._repo.manifest.find(self._changeset[0], path)
        if not node:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path):
        return subrepo.subrepo(self, path)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def match(self, pats=[], include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def dirty(self, missing=False, merge=True, branch=True):
        return False

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                rev2 = ctx2.subrev(subpath)
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self._repo.ui.status(_("skipping missing "
                                           "subrepository: %s\n") % subpath)

        for l in r:
            l.sort()

        return r


def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None, extra=None):
    def getfilectx(repo, memctx, path):
        data, mode, copied = store.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data, islink=islink, isexec=isexec,
                          copied=copied, memctx=memctx)
    if extra is None:
        extra = {}
    if branch:
        extra['branch'] = encoding.fromlocal(branch)
    ctx = memctx(repo, parents, text, files, getfilectx, user,
                 date, extra, editor)
    return ctx

class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if changeid == '.' or changeid == repo.dirstate.p1():
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            try:
                r = int(changeid)
                if str(r) != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if changeid in repo.unfiltered().dirstate.parents():
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                if len(changeid) == 20:
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            if repo.filtername.startswith('visible'):
                msg = _("hidden revision '%s'") % changeid
                hint = _('use --hidden to access hidden revisions')
                raise error.FilteredRepoLookupError(msg, hint=hint)
            msg = _("filtered revision '%s' (not in '%s' subset)")
            msg %= (changeid, repo.filtername)
            raise error.FilteredRepoLookupError(msg)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)

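    # Illustrative lookups resolved by __init__ above (assuming `repo` is a
    # localrepository; this comment is not part of the original file):
    #   repo[7]             # local revision number
    #   repo['tip']         # handled explicitly before the generic paths
    #   repo['.']           # dirstate parent fast path
    #   repo['null']        # the null revision
    #   repo['183965a0']    # full or unambiguous partial hex nodeid
    #   repo['mybookmark']  # bookmarks/tags/branches via repo.names.singlenode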
    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __nonzero__(self):
        return self._rev != nullrev

    @propertycache
    def _changeset(self):
        return self._repo.changelog.read(self.rev())

    @propertycache
    def _manifest(self):
        return self._repo.manifest.read(self._changeset[0])

    @propertycache
    def _manifestdelta(self):
        return self._repo.manifest.readdelta(self._changeset[0])

    @propertycache
    def _parents(self):
        p = self._repo.changelog.parentrevs(self._rev)
        if p[1] == nullrev:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def changeset(self):
        return self._changeset
    def manifestnode(self):
        return self._changeset[0]

    def user(self):
        return self._changeset[1]
    def date(self):
        return self._changeset[2]
    def files(self):
        return self._changeset[3]
    def description(self):
        return self._changeset[4]
    def branch(self):
        return encoding.tolocal(self._changeset[5].get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset[5]
    def extra(self):
        return self._changeset[5]
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            for r in self._repo.ui.configlist('merge', 'preferancestor'):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)

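    # Illustrative configuration consulted by ancestor() above (not part of
    # the original file): listing one or more revisions under
    #   [merge]
    #   preferancestor = <rev> ...
    # makes the first listed revision that is among the common-ancestor heads
    # win; otherwise the plain revlog ancestor is used and, with warn=True,
    # the note above is printed.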
    def descendant(self, other):
        """True if other is descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        return self.walk(match)

class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory."""
    def __new__(cls, repo, path, *args, **kwargs):
        return super(basefilectx, cls).__new__(cls)

    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if '_changeid' in self.__dict__:
            return self._changeid
        elif '_changectx' in self.__dict__:
            return self._changectx.rev()
        elif '_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._path, self._filelog,
                                       self._filenode, self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if '_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    def flags(self):
        return self._changectx.flags(self._path)
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def repo(self):
        return self._repo

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def cmp(self, fctx):
        """compare with other file context

        returns True if different from fctx.
742 """
742 """
743 if (fctx._filerev is None
743 if (fctx._filerev is None
744 and (self._repo._encodefilterpats
744 and (self._repo._encodefilterpats
745 # if file data starts with '\1\n', empty metadata block is
745 # if file data starts with '\1\n', empty metadata block is
746 # prepended, which adds 4 bytes to filelog.size().
746 # prepended, which adds 4 bytes to filelog.size().
747 or self.size() - 4 == fctx.size())
747 or self.size() - 4 == fctx.size())
748 or self.size() == fctx.size()):
748 or self.size() == fctx.size()):
749 return self._filelog.cmp(self._filenode, fctx.data())
749 return self._filelog.cmp(self._filenode, fctx.data())
750
750
751 return True
751 return True
752
752
753 def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
753 def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
754 """return the first ancestor of <srcrev> introducing <fnode>
754 """return the first ancestor of <srcrev> introducing <fnode>
755
755
756 If the linkrev of the file revision does not point to an ancestor of
756 If the linkrev of the file revision does not point to an ancestor of
757 srcrev, we'll walk down the ancestors until we find one introducing
757 srcrev, we'll walk down the ancestors until we find one introducing
758 this file revision.
758 this file revision.
759
759
760 :repo: a localrepository object (used to access changelog and manifest)
760 :repo: a localrepository object (used to access changelog and manifest)
761 :path: the file path
761 :path: the file path
762 :fnode: the nodeid of the file revision
762 :fnode: the nodeid of the file revision
763 :filelog: the filelog of this path
763 :filelog: the filelog of this path
764 :srcrev: the changeset revision we search ancestors from
764 :srcrev: the changeset revision we search ancestors from
765 :inclusive: if true, the src revision will also be checked
765 :inclusive: if true, the src revision will also be checked
766 """
766 """
767 repo = self._repo
767 repo = self._repo
768 cl = repo.unfiltered().changelog
768 cl = repo.unfiltered().changelog
769 ma = repo.manifest
769 ma = repo.manifest
770 # fetch the linkrev
770 # fetch the linkrev
771 fr = filelog.rev(fnode)
771 fr = filelog.rev(fnode)
772 lkr = filelog.linkrev(fr)
772 lkr = filelog.linkrev(fr)
773 # hack to reuse ancestor computation when searching for renames
773 # hack to reuse ancestor computation when searching for renames
774 memberanc = getattr(self, '_ancestrycontext', None)
774 memberanc = getattr(self, '_ancestrycontext', None)
775 iteranc = None
775 iteranc = None
776 if srcrev is None:
776 if srcrev is None:
777 # wctx case, used by workingfilectx during mergecopy
777 # wctx case, used by workingfilectx during mergecopy
778 revs = [p.rev() for p in self._repo[None].parents()]
778 revs = [p.rev() for p in self._repo[None].parents()]
779 inclusive = True # we skipped the real (revless) source
779 inclusive = True # we skipped the real (revless) source
780 else:
780 else:
781 revs = [srcrev]
781 revs = [srcrev]
782 if memberanc is None:
782 if memberanc is None:
783 memberanc = iteranc = cl.ancestors(revs, lkr,
783 memberanc = iteranc = cl.ancestors(revs, lkr,
784 inclusive=inclusive)
784 inclusive=inclusive)
785 # check if this linkrev is an ancestor of srcrev
785 # check if this linkrev is an ancestor of srcrev
786 if lkr not in memberanc:
786 if lkr not in memberanc:
787 if iteranc is None:
787 if iteranc is None:
788 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
788 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
789 for a in iteranc:
789 for a in iteranc:
790 ac = cl.read(a) # get changeset data (we avoid object creation)
790 ac = cl.read(a) # get changeset data (we avoid object creation)
791 if path in ac[3]: # checking the 'files' field.
791 if path in ac[3]: # checking the 'files' field.
792 # The file has been touched, check if the content is
792 # The file has been touched, check if the content is
793 # similar to the one we search for.
793 # similar to the one we search for.
794 if fnode == ma.readfast(ac[0]).get(path):
794 if fnode == ma.readfast(ac[0]).get(path):
795 return a
795 return a
            # In theory, we should never get out of that loop without a result.
            # But if the manifest uses a buggy file revision (not a child of
            # the one it replaces), we could. Such a buggy situation will
            # likely result in a crash somewhere else at some point.
        return lkr

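    # Illustrative linkrev-shadowing scenario for _adjustlinkrev() above (not
    # part of the original file): when two changesets introduce byte-identical
    # revisions of a file, the filelog stores the revision once and its linkrev
    # points at the first introducer. Reached from a descendant of the second
    # introducer only (or when the first one is hidden), that linkrev is not an
    # ancestor of srcrev, so the walk above returns the second introducer
    # instead.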
    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it takes into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            return self.linkrev()
        return self._adjustlinkrev(self._path, self._filelog, self._filenode,
                                   self.rev(), inclusive=True)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parents are nullid and pl is
            #   empty.
            # - In case of merge, only one of the parents is nullid and should
            #   be replaced with the rename information. This parent is
            #   -always- the first one.
            #
            # As nullid has always been filtered out in the previous list
            # comprehension, inserting at 0 will always result in replacing
            # the first nullid parent with the rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, linenumber=None, diffopts=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.
        If the "linenumber" parameter is not None, this instead returns
        tuples of ((ctx, linenumber), line) for each line, where linenumber
        is the line's number at its first appearance in the managed file.
        To reduce annotation cost, a fixed value (False) is used as the
        linenumber if the "linenumber" parameter is False.'''
875
875
876 if linenumber is None:
876 if linenumber is None:
877 def decorate(text, rev):
877 def decorate(text, rev):
878 return ([rev] * len(text.splitlines()), text)
878 return ([rev] * len(text.splitlines()), text)
879 elif linenumber:
879 elif linenumber:
880 def decorate(text, rev):
880 def decorate(text, rev):
881 size = len(text.splitlines())
881 size = len(text.splitlines())
882 return ([(rev, i) for i in xrange(1, size + 1)], text)
882 return ([(rev, i) for i in xrange(1, size + 1)], text)
883 else:
883 else:
884 def decorate(text, rev):
884 def decorate(text, rev):
885 return ([(rev, False)] * len(text.splitlines()), text)
885 return ([(rev, False)] * len(text.splitlines()), text)
886
886
887 def pair(parent, child):
887 def pair(parent, child):
888 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
888 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
889 refine=True)
889 refine=True)
890 for (a1, a2, b1, b2), t in blocks:
890 for (a1, a2, b1, b2), t in blocks:
891 # Changed blocks ('!') or blocks made only of blank lines ('~')
891 # Changed blocks ('!') or blocks made only of blank lines ('~')
892 # belong to the child.
892 # belong to the child.
893 if t == '=':
893 if t == '=':
894 child[0][b1:b2] = parent[0][a1:a2]
894 child[0][b1:b2] = parent[0][a1:a2]
895 return child
895 return child
896
896
897 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
897 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
898
898
899 def parents(f):
899 def parents(f):
900 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
900 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
901 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
901 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
902 # from the topmost introrev (= srcrev) down to p.linkrev() if it
902 # from the topmost introrev (= srcrev) down to p.linkrev() if it
903 # isn't an ancestor of the srcrev.
903 # isn't an ancestor of the srcrev.
904 f._changeid
904 f._changeid
905 pl = f.parents()
905 pl = f.parents()
906
906
907 # Don't return renamed parents if we aren't following.
907 # Don't return renamed parents if we aren't following.
908 if not follow:
908 if not follow:
909 pl = [p for p in pl if p.path() == f.path()]
909 pl = [p for p in pl if p.path() == f.path()]
910
910
911 # renamed filectx won't have a filelog yet, so set it
911 # renamed filectx won't have a filelog yet, so set it
912 # from the cache to save time
912 # from the cache to save time
913 for p in pl:
913 for p in pl:
914 if '_filelog' not in p.__dict__:
914 if '_filelog' not in p.__dict__:
915 p._filelog = getlog(p.path())
915 p._filelog = getlog(p.path())
916
916
917 return pl
917 return pl
918
918
919 # use linkrev to find the first changeset where self appeared
919 # use linkrev to find the first changeset where self appeared
920 base = self
920 base = self
921 introrev = self.introrev()
921 introrev = self.introrev()
922 if self.rev() != introrev:
922 if self.rev() != introrev:
923 base = self.filectx(self.filenode(), changeid=introrev)
923 base = self.filectx(self.filenode(), changeid=introrev)
924 if getattr(base, '_ancestrycontext', None) is None:
924 if getattr(base, '_ancestrycontext', None) is None:
925 cl = self._repo.changelog
925 cl = self._repo.changelog
926 if introrev is None:
926 if introrev is None:
927 # wctx is not inclusive, but works because _ancestrycontext
927 # wctx is not inclusive, but works because _ancestrycontext
928 # is used to test filelog revisions
928 # is used to test filelog revisions
929 ac = cl.ancestors([p.rev() for p in base.parents()],
929 ac = cl.ancestors([p.rev() for p in base.parents()],
930 inclusive=True)
930 inclusive=True)
931 else:
931 else:
932 ac = cl.ancestors([introrev], inclusive=True)
932 ac = cl.ancestors([introrev], inclusive=True)
933 base._ancestrycontext = ac
933 base._ancestrycontext = ac
934
934
935 # This algorithm would prefer to be recursive, but Python is a
935 # This algorithm would prefer to be recursive, but Python is a
936 # bit recursion-hostile. Instead we do an iterative
936 # bit recursion-hostile. Instead we do an iterative
937 # depth-first search.
937 # depth-first search.
938
938
939 visit = [base]
939 visit = [base]
940 hist = {}
940 hist = {}
941 pcache = {}
941 pcache = {}
942 needed = {base: 1}
942 needed = {base: 1}
943 while visit:
943 while visit:
944 f = visit[-1]
944 f = visit[-1]
945 pcached = f in pcache
945 pcached = f in pcache
946 if not pcached:
946 if not pcached:
947 pcache[f] = parents(f)
947 pcache[f] = parents(f)
948
948
949 ready = True
949 ready = True
950 pl = pcache[f]
950 pl = pcache[f]
951 for p in pl:
951 for p in pl:
952 if p not in hist:
952 if p not in hist:
953 ready = False
953 ready = False
954 visit.append(p)
954 visit.append(p)
955 if not pcached:
955 if not pcached:
956 needed[p] = needed.get(p, 0) + 1
956 needed[p] = needed.get(p, 0) + 1
957 if ready:
957 if ready:
958 visit.pop()
958 visit.pop()
959 reusable = f in hist
959 reusable = f in hist
960 if reusable:
960 if reusable:
961 curr = hist[f]
961 curr = hist[f]
962 else:
962 else:
963 curr = decorate(f.data(), f)
963 curr = decorate(f.data(), f)
964 for p in pl:
964 for p in pl:
965 if not reusable:
965 if not reusable:
966 curr = pair(hist[p], curr)
966 curr = pair(hist[p], curr)
967 if needed[p] == 1:
967 if needed[p] == 1:
968 del hist[p]
968 del hist[p]
969 del needed[p]
969 del needed[p]
970 else:
970 else:
971 needed[p] -= 1
971 needed[p] -= 1
972
972
973 hist[f] = curr
973 hist[f] = curr
974 pcache[f] = []
974 pcache[f] = []
975
975
976 return zip(hist[base][0], hist[base][1].splitlines(True))
976 return zip(hist[base][0], hist[base][1].splitlines(True))
977
977
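# Editorial sketch (not part of the module): the decorate()/pair() merge used by
# annotate above, re-done standalone with difflib instead of mercurial's internal
# mdiff, to show how unchanged lines keep the parent's annotation while changed
# or new lines stay attributed to the child. The revision labels are made up.
import difflib

def _decorate(text, rev):
    return ([rev] * len(text.splitlines()), text)

def _pair(parent, child):
    matcher = difflib.SequenceMatcher(None, parent[1].splitlines(),
                                      child[1].splitlines())
    for tag, a1, a2, b1, b2 in matcher.get_opcodes():
        if tag == 'equal':
            child[0][b1:b2] = parent[0][a1:a2]
    return child

parent = _decorate("a\nb\nc\n", "p1")
child = _decorate("a\nB\nc\nd\n", "wdir")
print(_pair(parent, child)[0])    # ['p1', 'wdir', 'p1', 'wdir']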
978 def ancestors(self, followfirst=False):
978 def ancestors(self, followfirst=False):
979 visit = {}
979 visit = {}
980 c = self
980 c = self
981 if followfirst:
981 if followfirst:
982 cut = 1
982 cut = 1
983 else:
983 else:
984 cut = None
984 cut = None
985
985
986 while True:
986 while True:
987 for parent in c.parents()[:cut]:
987 for parent in c.parents()[:cut]:
988 visit[(parent.linkrev(), parent.filenode())] = parent
988 visit[(parent.linkrev(), parent.filenode())] = parent
989 if not visit:
989 if not visit:
990 break
990 break
991 c = visit.pop(max(visit))
991 c = visit.pop(max(visit))
992 yield c
992 yield c
993
993
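# Editorial sketch (hypothetical data): the ancestors() generator above keeps
# pending candidates keyed by (linkrev, filenode) and pops the max key first, so
# file ancestors come back in decreasing linkrev order even when several
# branches are pending at once.
fakeparents = {(5, 'e'): [(3, 'c'), (4, 'd')], (4, 'd'): [(1, 'a')],
               (3, 'c'): [(2, 'b')], (2, 'b'): [], (1, 'a'): []}

def fakeancestors(start):
    visit, c = {}, start
    while True:
        for p in fakeparents[c]:
            visit[p] = p
        if not visit:
            break
        c = visit.pop(max(visit))
        yield c

print(list(fakeancestors((5, 'e'))))    # [(4, 'd'), (3, 'c'), (2, 'b'), (1, 'a')]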
994 class filectx(basefilectx):
994 class filectx(basefilectx):
995 """A filecontext object makes access to data related to a particular
995 """A filecontext object makes access to data related to a particular
996 filerevision convenient."""
996 filerevision convenient."""
997 def __init__(self, repo, path, changeid=None, fileid=None,
997 def __init__(self, repo, path, changeid=None, fileid=None,
998 filelog=None, changectx=None):
998 filelog=None, changectx=None):
999 """changeid can be a changeset revision, node, or tag.
999 """changeid can be a changeset revision, node, or tag.
1000 fileid can be a file revision or node."""
1000 fileid can be a file revision or node."""
1001 self._repo = repo
1001 self._repo = repo
1002 self._path = path
1002 self._path = path
1003
1003
1004 assert (changeid is not None
1004 assert (changeid is not None
1005 or fileid is not None
1005 or fileid is not None
1006 or changectx is not None), \
1006 or changectx is not None), \
1007 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1007 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1008 % (changeid, fileid, changectx))
1008 % (changeid, fileid, changectx))
1009
1009
1010 if filelog is not None:
1010 if filelog is not None:
1011 self._filelog = filelog
1011 self._filelog = filelog
1012
1012
1013 if changeid is not None:
1013 if changeid is not None:
1014 self._changeid = changeid
1014 self._changeid = changeid
1015 if changectx is not None:
1015 if changectx is not None:
1016 self._changectx = changectx
1016 self._changectx = changectx
1017 if fileid is not None:
1017 if fileid is not None:
1018 self._fileid = fileid
1018 self._fileid = fileid
1019
1019
1020 @propertycache
1020 @propertycache
1021 def _changectx(self):
1021 def _changectx(self):
1022 try:
1022 try:
1023 return changectx(self._repo, self._changeid)
1023 return changectx(self._repo, self._changeid)
1024 except error.FilteredRepoLookupError:
1024 except error.FilteredRepoLookupError:
1025 # Linkrev may point to any revision in the repository. When the
1025 # Linkrev may point to any revision in the repository. When the
1026 # repository is filtered this may lead to `filectx` trying to build
1026 # repository is filtered this may lead to `filectx` trying to build
1027 # `changectx` for filtered revision. In such case we fallback to
1027 # `changectx` for filtered revision. In such case we fallback to
1028 # creating `changectx` on the unfiltered version of the repository.
1028 # creating `changectx` on the unfiltered version of the repository.
1029 # This fallback should not be an issue because `changectx` from
1029 # This fallback should not be an issue because `changectx` from
1030 # `filectx` are not used in complex operations that care about
1030 # `filectx` are not used in complex operations that care about
1031 # filtering.
1031 # filtering.
1032 #
1032 #
1033 # This fallback is a cheap and dirty fix that prevents several
1033 # This fallback is a cheap and dirty fix that prevents several
1034 # crashes. It does not ensure the behavior is correct. However the
1034 # crashes. It does not ensure the behavior is correct. However the
1035 # behavior was not correct before filtering either, and "incorrect
1035 # behavior was not correct before filtering either, and "incorrect
1036 # behavior" is seen as better than "crash".
1036 # behavior" is seen as better than "crash".
1037 #
1037 #
1038 # Linkrevs have several serious problems with filtering that are
1038 # Linkrevs have several serious problems with filtering that are
1039 # complicated to solve. Proper handling of the issue here should be
1039 # complicated to solve. Proper handling of the issue here should be
1040 # considered when a solution to the linkrev issue is on the table.
1040 # considered when a solution to the linkrev issue is on the table.
1041 return changectx(self._repo.unfiltered(), self._changeid)
1041 return changectx(self._repo.unfiltered(), self._changeid)
1042
1042
1043 def filectx(self, fileid, changeid=None):
1043 def filectx(self, fileid, changeid=None):
1044 '''opens an arbitrary revision of the file without
1044 '''opens an arbitrary revision of the file without
1045 opening a new filelog'''
1045 opening a new filelog'''
1046 return filectx(self._repo, self._path, fileid=fileid,
1046 return filectx(self._repo, self._path, fileid=fileid,
1047 filelog=self._filelog, changeid=changeid)
1047 filelog=self._filelog, changeid=changeid)
1048
1048
1049 def data(self):
1049 def data(self):
1050 try:
1050 try:
1051 return self._filelog.read(self._filenode)
1051 return self._filelog.read(self._filenode)
1052 except error.CensoredNodeError:
1052 except error.CensoredNodeError:
1053 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1053 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
1054 return ""
1054 return ""
1055 raise util.Abort(_("censored node: %s") % short(self._filenode),
1055 raise util.Abort(_("censored node: %s") % short(self._filenode),
1056 hint=_("set censor.policy to ignore errors"))
1056 hint=_("set censor.policy to ignore errors"))
1057
1057
1058 def size(self):
1058 def size(self):
1059 return self._filelog.size(self._filerev)
1059 return self._filelog.size(self._filerev)
1060
1060
1061 def renamed(self):
1061 def renamed(self):
1062 """check if file was actually renamed in this changeset revision
1062 """check if file was actually renamed in this changeset revision
1063
1063
1064 If a rename is logged in the file revision, we report the copy for the
1064 If a rename is logged in the file revision, we report the copy for the
1065 changeset only if the file revision's linkrev points back to the changeset
1065 changeset only if the file revision's linkrev points back to the changeset
1066 in question or both changeset parents contain different file revisions.
1066 in question or both changeset parents contain different file revisions.
1067 """
1067 """
1068
1068
1069 renamed = self._filelog.renamed(self._filenode)
1069 renamed = self._filelog.renamed(self._filenode)
1070 if not renamed:
1070 if not renamed:
1071 return renamed
1071 return renamed
1072
1072
1073 if self.rev() == self.linkrev():
1073 if self.rev() == self.linkrev():
1074 return renamed
1074 return renamed
1075
1075
1076 name = self.path()
1076 name = self.path()
1077 fnode = self._filenode
1077 fnode = self._filenode
1078 for p in self._changectx.parents():
1078 for p in self._changectx.parents():
1079 try:
1079 try:
1080 if fnode == p.filenode(name):
1080 if fnode == p.filenode(name):
1081 return None
1081 return None
1082 except error.LookupError:
1082 except error.LookupError:
1083 pass
1083 pass
1084 return renamed
1084 return renamed
1085
1085
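# Editorial usage sketch (assumes a Mercurial repository at /tmp/repo and a file
# that was copied or renamed there; both are hypothetical): renamed() returns
# (source path, source filenode) when the revision records a copy that is not
# already explained by the changeset parents, and None or False otherwise.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), '/tmp/repo')       # hypothetical path
fctx = repo['tip']['renamed-file.txt']              # hypothetical file name
print(fctx.renamed())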
1086 def children(self):
1086 def children(self):
1087 # hard for renames
1087 # hard for renames
1088 c = self._filelog.children(self._filenode)
1088 c = self._filelog.children(self._filenode)
1089 return [filectx(self._repo, self._path, fileid=x,
1089 return [filectx(self._repo, self._path, fileid=x,
1090 filelog=self._filelog) for x in c]
1090 filelog=self._filelog) for x in c]
1091
1091
1092 class committablectx(basectx):
1092 class committablectx(basectx):
1093 """A committablectx object provides common functionality for a context that
1093 """A committablectx object provides common functionality for a context that
1094 wants the ability to commit, e.g. workingctx or memctx."""
1094 wants the ability to commit, e.g. workingctx or memctx."""
1095 def __init__(self, repo, text="", user=None, date=None, extra=None,
1095 def __init__(self, repo, text="", user=None, date=None, extra=None,
1096 changes=None):
1096 changes=None):
1097 self._repo = repo
1097 self._repo = repo
1098 self._rev = None
1098 self._rev = None
1099 self._node = None
1099 self._node = None
1100 self._text = text
1100 self._text = text
1101 if date:
1101 if date:
1102 self._date = util.parsedate(date)
1102 self._date = util.parsedate(date)
1103 if user:
1103 if user:
1104 self._user = user
1104 self._user = user
1105 if changes:
1105 if changes:
1106 self._status = changes
1106 self._status = changes
1107
1107
1108 self._extra = {}
1108 self._extra = {}
1109 if extra:
1109 if extra:
1110 self._extra = extra.copy()
1110 self._extra = extra.copy()
1111 if 'branch' not in self._extra:
1111 if 'branch' not in self._extra:
1112 try:
1112 try:
1113 branch = encoding.fromlocal(self._repo.dirstate.branch())
1113 branch = encoding.fromlocal(self._repo.dirstate.branch())
1114 except UnicodeDecodeError:
1114 except UnicodeDecodeError:
1115 raise util.Abort(_('branch name not in UTF-8!'))
1115 raise util.Abort(_('branch name not in UTF-8!'))
1116 self._extra['branch'] = branch
1116 self._extra['branch'] = branch
1117 if self._extra['branch'] == '':
1117 if self._extra['branch'] == '':
1118 self._extra['branch'] = 'default'
1118 self._extra['branch'] = 'default'
1119
1119
1120 def __str__(self):
1120 def __str__(self):
1121 return str(self._parents[0]) + "+"
1121 return str(self._parents[0]) + "+"
1122
1122
1123 def __nonzero__(self):
1123 def __nonzero__(self):
1124 return True
1124 return True
1125
1125
1126 def _buildflagfunc(self):
1126 def _buildflagfunc(self):
1127 # Create a fallback function for getting file flags when the
1127 # Create a fallback function for getting file flags when the
1128 # filesystem doesn't support them
1128 # filesystem doesn't support them
1129
1129
1130 copiesget = self._repo.dirstate.copies().get
1130 copiesget = self._repo.dirstate.copies().get
1131
1131
1132 if len(self._parents) < 2:
1132 if len(self._parents) < 2:
1133 # when we have one parent, it's easy: copy from parent
1133 # when we have one parent, it's easy: copy from parent
1134 man = self._parents[0].manifest()
1134 man = self._parents[0].manifest()
1135 def func(f):
1135 def func(f):
1136 f = copiesget(f, f)
1136 f = copiesget(f, f)
1137 return man.flags(f)
1137 return man.flags(f)
1138 else:
1138 else:
1139 # merges are tricky: we try to reconstruct the unstored
1139 # merges are tricky: we try to reconstruct the unstored
1140 # result from the merge (issue1802)
1140 # result from the merge (issue1802)
1141 p1, p2 = self._parents
1141 p1, p2 = self._parents
1142 pa = p1.ancestor(p2)
1142 pa = p1.ancestor(p2)
1143 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1143 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1144
1144
1145 def func(f):
1145 def func(f):
1146 f = copiesget(f, f) # may be wrong for merges with copies
1146 f = copiesget(f, f) # may be wrong for merges with copies
1147 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1147 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1148 if fl1 == fl2:
1148 if fl1 == fl2:
1149 return fl1
1149 return fl1
1150 if fl1 == fla:
1150 if fl1 == fla:
1151 return fl2
1151 return fl2
1152 if fl2 == fla:
1152 if fl2 == fla:
1153 return fl1
1153 return fl1
1154 return '' # punt for conflicts
1154 return '' # punt for conflicts
1155
1155
1156 return func
1156 return func
1157
1157
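# Editorial sketch of the flag-merging rule implemented above for two-parent
# working directories (issue1802): keep whichever side changed the flag relative
# to the ancestor, and punt to '' on a real conflict. Pure illustration, no
# repository needed.
def _mergeflags(fl1, fl2, fla):
    if fl1 == fl2:
        return fl1
    if fl1 == fla:
        return fl2
    if fl2 == fla:
        return fl1
    return ''

print(_mergeflags('x', '', ''))    # 'x'  -> only p1 made the file executable
print(_mergeflags('l', 'x', ''))   # ''   -> both sides changed it differently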
1158 @propertycache
1158 @propertycache
1159 def _flagfunc(self):
1159 def _flagfunc(self):
1160 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1160 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1161
1161
1162 @propertycache
1162 @propertycache
1163 def _manifest(self):
1163 def _manifest(self):
1164 """generate a manifest corresponding to the values in self._status
1164 """generate a manifest corresponding to the values in self._status
1165
1165
1166 This reuses the file nodeid from the parent, but appends an extra letter
1166 This reuses the file nodeid from the parent, but appends an extra letter
1167 when the file is modified. Modified files get an extra 'm' while added files
1167 when the file is modified. Modified files get an extra 'm' while added files
1168 get an extra 'a'. This is used by manifest merge to see that files
1168 get an extra 'a'. This is used by manifest merge to see that files
1169 are different and by update logic to avoid deleting newly added files.
1169 are different and by update logic to avoid deleting newly added files.
1170 """
1170 """
1171
1171
1172 man1 = self._parents[0].manifest()
1172 man1 = self._parents[0].manifest()
1173 man = man1.copy()
1173 man = man1.copy()
1174 if len(self._parents) > 1:
1174 if len(self._parents) > 1:
1175 man2 = self.p2().manifest()
1175 man2 = self.p2().manifest()
1176 def getman(f):
1176 def getman(f):
1177 if f in man1:
1177 if f in man1:
1178 return man1
1178 return man1
1179 return man2
1179 return man2
1180 else:
1180 else:
1181 getman = lambda f: man1
1181 getman = lambda f: man1
1182
1182
1183 copied = self._repo.dirstate.copies()
1183 copied = self._repo.dirstate.copies()
1184 ff = self._flagfunc
1184 ff = self._flagfunc
1185 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1185 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1186 for f in l:
1186 for f in l:
1187 orig = copied.get(f, f)
1187 orig = copied.get(f, f)
1188 man[f] = getman(orig).get(orig, nullid) + i
1188 man[f] = getman(orig).get(orig, nullid) + i
1189 try:
1189 try:
1190 man.setflag(f, ff(f))
1190 man.setflag(f, ff(f))
1191 except OSError:
1191 except OSError:
1192 pass
1192 pass
1193
1193
1194 for f in self._status.deleted + self._status.removed:
1194 for f in self._status.deleted + self._status.removed:
1195 if f in man:
1195 if f in man:
1196 del man[f]
1196 del man[f]
1197
1197
1198 return man
1198 return man
1199
1199
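# Editorial sketch of the node-suffix trick described in the docstring above:
# the working-directory manifest reuses a parent nodeid (or nullid for new
# files) and appends one marker letter instead of computing a real hash. The
# values below are made up.
nullid = "\0" * 20
parentnode = "\x11" * 20                       # pretend nodeid from the parent
wdirmanifest = {
    'modified.txt': parentnode + 'm',          # modified relative to the parent
    'brand-new.txt': nullid + 'a',             # added, no parent file revision
}
print(len(wdirmanifest['modified.txt']))       # 21: 20-byte hash + marker letter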
1200 @propertycache
1200 @propertycache
1201 def _status(self):
1201 def _status(self):
1202 return self._repo.status()
1202 return self._repo.status()
1203
1203
1204 @propertycache
1204 @propertycache
1205 def _user(self):
1205 def _user(self):
1206 return self._repo.ui.username()
1206 return self._repo.ui.username()
1207
1207
1208 @propertycache
1208 @propertycache
1209 def _date(self):
1209 def _date(self):
1210 return util.makedate()
1210 return util.makedate()
1211
1211
1212 def subrev(self, subpath):
1212 def subrev(self, subpath):
1213 return None
1213 return None
1214
1214
1215 def manifestnode(self):
1215 def manifestnode(self):
1216 return None
1216 return None
1217 def user(self):
1217 def user(self):
1218 return self._user or self._repo.ui.username()
1218 return self._user or self._repo.ui.username()
1219 def date(self):
1219 def date(self):
1220 return self._date
1220 return self._date
1221 def description(self):
1221 def description(self):
1222 return self._text
1222 return self._text
1223 def files(self):
1223 def files(self):
1224 return sorted(self._status.modified + self._status.added +
1224 return sorted(self._status.modified + self._status.added +
1225 self._status.removed)
1225 self._status.removed)
1226
1226
1227 def modified(self):
1227 def modified(self):
1228 return self._status.modified
1228 return self._status.modified
1229 def added(self):
1229 def added(self):
1230 return self._status.added
1230 return self._status.added
1231 def removed(self):
1231 def removed(self):
1232 return self._status.removed
1232 return self._status.removed
1233 def deleted(self):
1233 def deleted(self):
1234 return self._status.deleted
1234 return self._status.deleted
1235 def branch(self):
1235 def branch(self):
1236 return encoding.tolocal(self._extra['branch'])
1236 return encoding.tolocal(self._extra['branch'])
1237 def closesbranch(self):
1237 def closesbranch(self):
1238 return 'close' in self._extra
1238 return 'close' in self._extra
1239 def extra(self):
1239 def extra(self):
1240 return self._extra
1240 return self._extra
1241
1241
1242 def tags(self):
1242 def tags(self):
1243 t = []
1243 t = []
1244 for p in self.parents():
1244 for p in self.parents():
1245 t.extend(p.tags())
1245 t.extend(p.tags())
1246 return t
1246 return t
1247
1247
1248 def bookmarks(self):
1248 def bookmarks(self):
1249 b = []
1249 b = []
1250 for p in self.parents():
1250 for p in self.parents():
1251 b.extend(p.bookmarks())
1251 b.extend(p.bookmarks())
1252 return b
1252 return b
1253
1253
1254 def phase(self):
1254 def phase(self):
1255 phase = phases.draft # default phase to draft
1255 phase = phases.draft # default phase to draft
1256 for p in self.parents():
1256 for p in self.parents():
1257 phase = max(phase, p.phase())
1257 phase = max(phase, p.phase())
1258 return phase
1258 return phase
1259
1259
1260 def hidden(self):
1260 def hidden(self):
1261 return False
1261 return False
1262
1262
1263 def children(self):
1263 def children(self):
1264 return []
1264 return []
1265
1265
1266 def flags(self, path):
1266 def flags(self, path):
1267 if '_manifest' in self.__dict__:
1267 if '_manifest' in self.__dict__:
1268 try:
1268 try:
1269 return self._manifest.flags(path)
1269 return self._manifest.flags(path)
1270 except KeyError:
1270 except KeyError:
1271 return ''
1271 return ''
1272
1272
1273 try:
1273 try:
1274 return self._flagfunc(path)
1274 return self._flagfunc(path)
1275 except OSError:
1275 except OSError:
1276 return ''
1276 return ''
1277
1277
1278 def ancestor(self, c2):
1278 def ancestor(self, c2):
1279 """return the "best" ancestor context of self and c2"""
1279 """return the "best" ancestor context of self and c2"""
1280 return self._parents[0].ancestor(c2) # punt on two parents for now
1280 return self._parents[0].ancestor(c2) # punt on two parents for now
1281
1281
1282 def walk(self, match):
1282 def walk(self, match):
1283 '''Generates matching file names.'''
1283 '''Generates matching file names.'''
1284 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1284 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1285 True, False))
1285 True, False))
1286
1286
1287 def matches(self, match):
1287 def matches(self, match):
1288 return sorted(self._repo.dirstate.matches(match))
1288 return sorted(self._repo.dirstate.matches(match))
1289
1289
1290 def ancestors(self):
1290 def ancestors(self):
1291 for p in self._parents:
1291 for p in self._parents:
1292 yield p
1292 yield p
1293 for a in self._repo.changelog.ancestors(
1293 for a in self._repo.changelog.ancestors(
1294 [p.rev() for p in self._parents]):
1294 [p.rev() for p in self._parents]):
1295 yield changectx(self._repo, a)
1295 yield changectx(self._repo, a)
1296
1296
1297 def markcommitted(self, node):
1297 def markcommitted(self, node):
1298 """Perform post-commit cleanup necessary after committing this ctx
1298 """Perform post-commit cleanup necessary after committing this ctx
1299
1299
1300 Specifically, this updates backing stores this working context
1300 Specifically, this updates backing stores this working context
1301 wraps to reflect the fact that the changes reflected by this
1301 wraps to reflect the fact that the changes reflected by this
1302 workingctx have been committed. For example, it marks
1302 workingctx have been committed. For example, it marks
1303 modified and added files as normal in the dirstate.
1303 modified and added files as normal in the dirstate.
1304
1304
1305 """
1305 """
1306
1306
1307 self._repo.dirstate.beginparentchange()
1307 self._repo.dirstate.beginparentchange()
1308 for f in self.modified() + self.added():
1308 for f in self.modified() + self.added():
1309 self._repo.dirstate.normal(f)
1309 self._repo.dirstate.normal(f)
1310 for f in self.removed():
1310 for f in self.removed():
1311 self._repo.dirstate.drop(f)
1311 self._repo.dirstate.drop(f)
1312 self._repo.dirstate.setparents(node)
1312 self._repo.dirstate.setparents(node)
1313 self._repo.dirstate.endparentchange()
1313 self._repo.dirstate.endparentchange()
1314
1314
1315 class workingctx(committablectx):
1315 class workingctx(committablectx):
1316 """A workingctx object makes access to data related to
1316 """A workingctx object makes access to data related to
1317 the current working directory convenient.
1317 the current working directory convenient.
1318 date - any valid date string or (unixtime, offset), or None.
1318 date - any valid date string or (unixtime, offset), or None.
1319 user - username string, or None.
1319 user - username string, or None.
1320 extra - a dictionary of extra values, or None.
1320 extra - a dictionary of extra values, or None.
1321 changes - a list of file lists as returned by localrepo.status()
1321 changes - a list of file lists as returned by localrepo.status()
1322 or None to use the repository status.
1322 or None to use the repository status.
1323 """
1323 """
1324 def __init__(self, repo, text="", user=None, date=None, extra=None,
1324 def __init__(self, repo, text="", user=None, date=None, extra=None,
1325 changes=None):
1325 changes=None):
1326 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1326 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1327
1327
1328 def __iter__(self):
1328 def __iter__(self):
1329 d = self._repo.dirstate
1329 d = self._repo.dirstate
1330 for f in d:
1330 for f in d:
1331 if d[f] != 'r':
1331 if d[f] != 'r':
1332 yield f
1332 yield f
1333
1333
1334 def __contains__(self, key):
1334 def __contains__(self, key):
1335 return self._repo.dirstate[key] not in "?r"
1335 return self._repo.dirstate[key] not in "?r"
1336
1336
1337 def hex(self):
1338 return "ff" * 20
1339
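# Editorial sketch of why the hex() override above is needed (assumption: the
# inherited implementation hex-encodes self.node(), and committablectx stores
# None as the node of the working directory).
from binascii import hexlify

wdirnode = None                     # what committablectx.__init__ leaves in _node
try:
    hexlify(wdirnode)               # what the inherited hex() would attempt
except TypeError:
    print("ff" * 20)                # the stable pseudo-id returned instead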
1337 @propertycache
1340 @propertycache
1338 def _parents(self):
1341 def _parents(self):
1339 p = self._repo.dirstate.parents()
1342 p = self._repo.dirstate.parents()
1340 if p[1] == nullid:
1343 if p[1] == nullid:
1341 p = p[:-1]
1344 p = p[:-1]
1342 return [changectx(self._repo, x) for x in p]
1345 return [changectx(self._repo, x) for x in p]
1343
1346
1344 def filectx(self, path, filelog=None):
1347 def filectx(self, path, filelog=None):
1345 """get a file context from the working directory"""
1348 """get a file context from the working directory"""
1346 return workingfilectx(self._repo, path, workingctx=self,
1349 return workingfilectx(self._repo, path, workingctx=self,
1347 filelog=filelog)
1350 filelog=filelog)
1348
1351
1349 def dirty(self, missing=False, merge=True, branch=True):
1352 def dirty(self, missing=False, merge=True, branch=True):
1350 "check whether a working directory is modified"
1353 "check whether a working directory is modified"
1351 # check subrepos first
1354 # check subrepos first
1352 for s in sorted(self.substate):
1355 for s in sorted(self.substate):
1353 if self.sub(s).dirty():
1356 if self.sub(s).dirty():
1354 return True
1357 return True
1355 # check current working dir
1358 # check current working dir
1356 return ((merge and self.p2()) or
1359 return ((merge and self.p2()) or
1357 (branch and self.branch() != self.p1().branch()) or
1360 (branch and self.branch() != self.p1().branch()) or
1358 self.modified() or self.added() or self.removed() or
1361 self.modified() or self.added() or self.removed() or
1359 (missing and self.deleted()))
1362 (missing and self.deleted()))
1360
1363
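# Editorial usage sketch (assumes an existing repository at /tmp/repo,
# hypothetical): repo[None] yields the workingctx, and dirty() reports whether
# anything is uncommitted, optionally counting missing files as well.
from mercurial import hg, ui as uimod

repo = hg.repository(uimod.ui(), '/tmp/repo')    # hypothetical path
wctx = repo[None]
print(wctx.dirty(missing=True))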
1361 def add(self, list, prefix=""):
1364 def add(self, list, prefix=""):
1362 join = lambda f: os.path.join(prefix, f)
1365 join = lambda f: os.path.join(prefix, f)
1363 wlock = self._repo.wlock()
1366 wlock = self._repo.wlock()
1364 ui, ds = self._repo.ui, self._repo.dirstate
1367 ui, ds = self._repo.ui, self._repo.dirstate
1365 try:
1368 try:
1366 rejected = []
1369 rejected = []
1367 lstat = self._repo.wvfs.lstat
1370 lstat = self._repo.wvfs.lstat
1368 for f in list:
1371 for f in list:
1369 scmutil.checkportable(ui, join(f))
1372 scmutil.checkportable(ui, join(f))
1370 try:
1373 try:
1371 st = lstat(f)
1374 st = lstat(f)
1372 except OSError:
1375 except OSError:
1373 ui.warn(_("%s does not exist!\n") % join(f))
1376 ui.warn(_("%s does not exist!\n") % join(f))
1374 rejected.append(f)
1377 rejected.append(f)
1375 continue
1378 continue
1376 if st.st_size > 10000000:
1379 if st.st_size > 10000000:
1377 ui.warn(_("%s: up to %d MB of RAM may be required "
1380 ui.warn(_("%s: up to %d MB of RAM may be required "
1378 "to manage this file\n"
1381 "to manage this file\n"
1379 "(use 'hg revert %s' to cancel the "
1382 "(use 'hg revert %s' to cancel the "
1380 "pending addition)\n")
1383 "pending addition)\n")
1381 % (f, 3 * st.st_size // 1000000, join(f)))
1384 % (f, 3 * st.st_size // 1000000, join(f)))
1382 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1385 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1383 ui.warn(_("%s not added: only files and symlinks "
1386 ui.warn(_("%s not added: only files and symlinks "
1384 "supported currently\n") % join(f))
1387 "supported currently\n") % join(f))
1385 rejected.append(f)
1388 rejected.append(f)
1386 elif ds[f] in 'amn':
1389 elif ds[f] in 'amn':
1387 ui.warn(_("%s already tracked!\n") % join(f))
1390 ui.warn(_("%s already tracked!\n") % join(f))
1388 elif ds[f] == 'r':
1391 elif ds[f] == 'r':
1389 ds.normallookup(f)
1392 ds.normallookup(f)
1390 else:
1393 else:
1391 ds.add(f)
1394 ds.add(f)
1392 return rejected
1395 return rejected
1393 finally:
1396 finally:
1394 wlock.release()
1397 wlock.release()
1395
1398
1396 def forget(self, files, prefix=""):
1399 def forget(self, files, prefix=""):
1397 join = lambda f: os.path.join(prefix, f)
1400 join = lambda f: os.path.join(prefix, f)
1398 wlock = self._repo.wlock()
1401 wlock = self._repo.wlock()
1399 try:
1402 try:
1400 rejected = []
1403 rejected = []
1401 for f in files:
1404 for f in files:
1402 if f not in self._repo.dirstate:
1405 if f not in self._repo.dirstate:
1403 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1406 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1404 rejected.append(f)
1407 rejected.append(f)
1405 elif self._repo.dirstate[f] != 'a':
1408 elif self._repo.dirstate[f] != 'a':
1406 self._repo.dirstate.remove(f)
1409 self._repo.dirstate.remove(f)
1407 else:
1410 else:
1408 self._repo.dirstate.drop(f)
1411 self._repo.dirstate.drop(f)
1409 return rejected
1412 return rejected
1410 finally:
1413 finally:
1411 wlock.release()
1414 wlock.release()
1412
1415
1413 def undelete(self, list):
1416 def undelete(self, list):
1414 pctxs = self.parents()
1417 pctxs = self.parents()
1415 wlock = self._repo.wlock()
1418 wlock = self._repo.wlock()
1416 try:
1419 try:
1417 for f in list:
1420 for f in list:
1418 if self._repo.dirstate[f] != 'r':
1421 if self._repo.dirstate[f] != 'r':
1419 self._repo.ui.warn(_("%s not removed!\n") % f)
1422 self._repo.ui.warn(_("%s not removed!\n") % f)
1420 else:
1423 else:
1421 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1424 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1422 t = fctx.data()
1425 t = fctx.data()
1423 self._repo.wwrite(f, t, fctx.flags())
1426 self._repo.wwrite(f, t, fctx.flags())
1424 self._repo.dirstate.normal(f)
1427 self._repo.dirstate.normal(f)
1425 finally:
1428 finally:
1426 wlock.release()
1429 wlock.release()
1427
1430
1428 def copy(self, source, dest):
1431 def copy(self, source, dest):
1429 try:
1432 try:
1430 st = self._repo.wvfs.lstat(dest)
1433 st = self._repo.wvfs.lstat(dest)
1431 except OSError, err:
1434 except OSError, err:
1432 if err.errno != errno.ENOENT:
1435 if err.errno != errno.ENOENT:
1433 raise
1436 raise
1434 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1437 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1435 return
1438 return
1436 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1439 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1437 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1440 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1438 "symbolic link\n") % dest)
1441 "symbolic link\n") % dest)
1439 else:
1442 else:
1440 wlock = self._repo.wlock()
1443 wlock = self._repo.wlock()
1441 try:
1444 try:
1442 if self._repo.dirstate[dest] in '?':
1445 if self._repo.dirstate[dest] in '?':
1443 self._repo.dirstate.add(dest)
1446 self._repo.dirstate.add(dest)
1444 elif self._repo.dirstate[dest] in 'r':
1447 elif self._repo.dirstate[dest] in 'r':
1445 self._repo.dirstate.normallookup(dest)
1448 self._repo.dirstate.normallookup(dest)
1446 self._repo.dirstate.copy(source, dest)
1449 self._repo.dirstate.copy(source, dest)
1447 finally:
1450 finally:
1448 wlock.release()
1451 wlock.release()
1449
1452
1450 def match(self, pats=[], include=None, exclude=None, default='glob',
1453 def match(self, pats=[], include=None, exclude=None, default='glob',
1451 listsubrepos=False, badfn=None):
1454 listsubrepos=False, badfn=None):
1452 r = self._repo
1455 r = self._repo
1453
1456
1454 # Only a case insensitive filesystem needs magic to translate user input
1457 # Only a case insensitive filesystem needs magic to translate user input
1455 # to actual case in the filesystem.
1458 # to actual case in the filesystem.
1456 if not util.checkcase(r.root):
1459 if not util.checkcase(r.root):
1457 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1460 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1458 exclude, default, r.auditor, self,
1461 exclude, default, r.auditor, self,
1459 listsubrepos=listsubrepos,
1462 listsubrepos=listsubrepos,
1460 badfn=badfn)
1463 badfn=badfn)
1461 return matchmod.match(r.root, r.getcwd(), pats,
1464 return matchmod.match(r.root, r.getcwd(), pats,
1462 include, exclude, default,
1465 include, exclude, default,
1463 auditor=r.auditor, ctx=self,
1466 auditor=r.auditor, ctx=self,
1464 listsubrepos=listsubrepos, badfn=badfn)
1467 listsubrepos=listsubrepos, badfn=badfn)
1465
1468
1466 def _filtersuspectsymlink(self, files):
1469 def _filtersuspectsymlink(self, files):
1467 if not files or self._repo.dirstate._checklink:
1470 if not files or self._repo.dirstate._checklink:
1468 return files
1471 return files
1469
1472
1470 # Symlink placeholders may get non-symlink-like contents
1473 # Symlink placeholders may get non-symlink-like contents
1471 # via user error or dereferencing by NFS or Samba servers,
1474 # via user error or dereferencing by NFS or Samba servers,
1472 # so we filter out any placeholders that don't look like a
1475 # so we filter out any placeholders that don't look like a
1473 # symlink
1476 # symlink
1474 sane = []
1477 sane = []
1475 for f in files:
1478 for f in files:
1476 if self.flags(f) == 'l':
1479 if self.flags(f) == 'l':
1477 d = self[f].data()
1480 d = self[f].data()
1478 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1481 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1479 self._repo.ui.debug('ignoring suspect symlink placeholder'
1482 self._repo.ui.debug('ignoring suspect symlink placeholder'
1480 ' "%s"\n' % f)
1483 ' "%s"\n' % f)
1481 continue
1484 continue
1482 sane.append(f)
1485 sane.append(f)
1483 return sane
1486 return sane
1484
1487
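# Editorial sketch of the placeholder heuristic above: a symlink placeholder is
# kept only if it looks like a plausible link target, i.e. short, single-line,
# non-empty and non-binary (the '\0' test here approximates util.binary).
def _plausible_symlink_target(d):
    return not (d == '' or len(d) >= 1024 or '\n' in d or '\0' in d)

print(_plausible_symlink_target('target/file'))            # True
print(_plausible_symlink_target('full file contents\n'))   # False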
1485 def _checklookup(self, files):
1488 def _checklookup(self, files):
1486 # check for any possibly clean files
1489 # check for any possibly clean files
1487 if not files:
1490 if not files:
1488 return [], []
1491 return [], []
1489
1492
1490 modified = []
1493 modified = []
1491 fixup = []
1494 fixup = []
1492 pctx = self._parents[0]
1495 pctx = self._parents[0]
1493 # do a full compare of any files that might have changed
1496 # do a full compare of any files that might have changed
1494 for f in sorted(files):
1497 for f in sorted(files):
1495 if (f not in pctx or self.flags(f) != pctx.flags(f)
1498 if (f not in pctx or self.flags(f) != pctx.flags(f)
1496 or pctx[f].cmp(self[f])):
1499 or pctx[f].cmp(self[f])):
1497 modified.append(f)
1500 modified.append(f)
1498 else:
1501 else:
1499 fixup.append(f)
1502 fixup.append(f)
1500
1503
1501 # update dirstate for files that are actually clean
1504 # update dirstate for files that are actually clean
1502 if fixup:
1505 if fixup:
1503 try:
1506 try:
1504 # updating the dirstate is optional
1507 # updating the dirstate is optional
1505 # so we don't wait on the lock
1508 # so we don't wait on the lock
1506 # wlock can invalidate the dirstate, so cache normal _after_
1509 # wlock can invalidate the dirstate, so cache normal _after_
1507 # taking the lock
1510 # taking the lock
1508 wlock = self._repo.wlock(False)
1511 wlock = self._repo.wlock(False)
1509 normal = self._repo.dirstate.normal
1512 normal = self._repo.dirstate.normal
1510 try:
1513 try:
1511 for f in fixup:
1514 for f in fixup:
1512 normal(f)
1515 normal(f)
1513 finally:
1516 finally:
1514 wlock.release()
1517 wlock.release()
1515 except error.LockError:
1518 except error.LockError:
1516 pass
1519 pass
1517 return modified, fixup
1520 return modified, fixup
1518
1521
1519 def _manifestmatches(self, match, s):
1522 def _manifestmatches(self, match, s):
1520 """Slow path for workingctx
1523 """Slow path for workingctx
1521
1524
1522 The fast path is when we compare the working directory to its parent;
1525 The fast path is when we compare the working directory to its parent;
1523 hitting this function means we are comparing with a non-parent, therefore
1526 hitting this function means we are comparing with a non-parent, therefore
1524 we need to build a manifest and return what matches.
1527 we need to build a manifest and return what matches.
1525 """
1528 """
1526 mf = self._repo['.']._manifestmatches(match, s)
1529 mf = self._repo['.']._manifestmatches(match, s)
1527 for f in s.modified + s.added:
1530 for f in s.modified + s.added:
1528 mf[f] = _newnode
1531 mf[f] = _newnode
1529 mf.setflag(f, self.flags(f))
1532 mf.setflag(f, self.flags(f))
1530 for f in s.removed:
1533 for f in s.removed:
1531 if f in mf:
1534 if f in mf:
1532 del mf[f]
1535 del mf[f]
1533 return mf
1536 return mf
1534
1537
1535 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1538 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1536 unknown=False):
1539 unknown=False):
1537 '''Gets the status from the dirstate -- internal use only.'''
1540 '''Gets the status from the dirstate -- internal use only.'''
1538 listignored, listclean, listunknown = ignored, clean, unknown
1541 listignored, listclean, listunknown = ignored, clean, unknown
1539 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1542 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1540 subrepos = []
1543 subrepos = []
1541 if '.hgsub' in self:
1544 if '.hgsub' in self:
1542 subrepos = sorted(self.substate)
1545 subrepos = sorted(self.substate)
1543 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1546 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1544 listclean, listunknown)
1547 listclean, listunknown)
1545
1548
1546 # check for any possibly clean files
1549 # check for any possibly clean files
1547 if cmp:
1550 if cmp:
1548 modified2, fixup = self._checklookup(cmp)
1551 modified2, fixup = self._checklookup(cmp)
1549 s.modified.extend(modified2)
1552 s.modified.extend(modified2)
1550
1553
1551 # update dirstate for files that are actually clean
1554 # update dirstate for files that are actually clean
1552 if fixup and listclean:
1555 if fixup and listclean:
1553 s.clean.extend(fixup)
1556 s.clean.extend(fixup)
1554
1557
1555 if match.always():
1558 if match.always():
1556 # cache for performance
1559 # cache for performance
1557 if s.unknown or s.ignored or s.clean:
1560 if s.unknown or s.ignored or s.clean:
1558 # "_status" is cached with list*=False in the normal route
1561 # "_status" is cached with list*=False in the normal route
1559 self._status = scmutil.status(s.modified, s.added, s.removed,
1562 self._status = scmutil.status(s.modified, s.added, s.removed,
1560 s.deleted, [], [], [])
1563 s.deleted, [], [], [])
1561 else:
1564 else:
1562 self._status = s
1565 self._status = s
1563
1566
1564 return s
1567 return s
1565
1568
1566 def _buildstatus(self, other, s, match, listignored, listclean,
1569 def _buildstatus(self, other, s, match, listignored, listclean,
1567 listunknown):
1570 listunknown):
1568 """build a status with respect to another context
1571 """build a status with respect to another context
1569
1572
1570 This includes logic for maintaining the fast path of status when
1573 This includes logic for maintaining the fast path of status when
1571 comparing the working directory against its parent, which is to skip
1574 comparing the working directory against its parent, which is to skip
1572 building a new manifest if self (working directory) is not comparing
1575 building a new manifest if self (working directory) is not comparing
1573 against its parent (repo['.']).
1576 against its parent (repo['.']).
1574 """
1577 """
1575 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1578 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1576 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1579 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1577 # might have accidentally ended up with the entire contents of the file
1580 # might have accidentally ended up with the entire contents of the file
1578 # they are supposed to be linking to.
1581 # they are supposed to be linking to.
1579 s.modified[:] = self._filtersuspectsymlink(s.modified)
1582 s.modified[:] = self._filtersuspectsymlink(s.modified)
1580 if other != self._repo['.']:
1583 if other != self._repo['.']:
1581 s = super(workingctx, self)._buildstatus(other, s, match,
1584 s = super(workingctx, self)._buildstatus(other, s, match,
1582 listignored, listclean,
1585 listignored, listclean,
1583 listunknown)
1586 listunknown)
1584 return s
1587 return s
1585
1588
1586 def _matchstatus(self, other, match):
1589 def _matchstatus(self, other, match):
1587 """override the match method with a filter for directory patterns
1590 """override the match method with a filter for directory patterns
1588
1591
1589 We use inheritance to customize the match.bad method only in cases of
1592 We use inheritance to customize the match.bad method only in cases of
1590 workingctx since it belongs only to the working directory when
1593 workingctx since it belongs only to the working directory when
1591 comparing against the parent changeset.
1594 comparing against the parent changeset.
1592
1595
1593 If we aren't comparing against the working directory's parent, then we
1596 If we aren't comparing against the working directory's parent, then we
1594 just use the default match object sent to us.
1597 just use the default match object sent to us.
1595 """
1598 """
1596 superself = super(workingctx, self)
1599 superself = super(workingctx, self)
1597 match = superself._matchstatus(other, match)
1600 match = superself._matchstatus(other, match)
1598 if other != self._repo['.']:
1601 if other != self._repo['.']:
1599 def bad(f, msg):
1602 def bad(f, msg):
1600 # 'f' may be a directory pattern from 'match.files()',
1603 # 'f' may be a directory pattern from 'match.files()',
1601 # so 'f not in ctx1' is not enough
1604 # so 'f not in ctx1' is not enough
1602 if f not in other and not other.hasdir(f):
1605 if f not in other and not other.hasdir(f):
1603 self._repo.ui.warn('%s: %s\n' %
1606 self._repo.ui.warn('%s: %s\n' %
1604 (self._repo.dirstate.pathto(f), msg))
1607 (self._repo.dirstate.pathto(f), msg))
1605 match.bad = bad
1608 match.bad = bad
1606 return match
1609 return match
1607
1610
1608 class committablefilectx(basefilectx):
1611 class committablefilectx(basefilectx):
1609 """A committablefilectx provides common functionality for a file context
1612 """A committablefilectx provides common functionality for a file context
1610 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1613 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1611 def __init__(self, repo, path, filelog=None, ctx=None):
1614 def __init__(self, repo, path, filelog=None, ctx=None):
1612 self._repo = repo
1615 self._repo = repo
1613 self._path = path
1616 self._path = path
1614 self._changeid = None
1617 self._changeid = None
1615 self._filerev = self._filenode = None
1618 self._filerev = self._filenode = None
1616
1619
1617 if filelog is not None:
1620 if filelog is not None:
1618 self._filelog = filelog
1621 self._filelog = filelog
1619 if ctx:
1622 if ctx:
1620 self._changectx = ctx
1623 self._changectx = ctx
1621
1624
1622 def __nonzero__(self):
1625 def __nonzero__(self):
1623 return True
1626 return True
1624
1627
1625 def linkrev(self):
1628 def linkrev(self):
1626 # linked to self._changectx no matter if file is modified or not
1629 # linked to self._changectx no matter if file is modified or not
1627 return self.rev()
1630 return self.rev()
1628
1631
1629 def parents(self):
1632 def parents(self):
1630 '''return parent filectxs, following copies if necessary'''
1633 '''return parent filectxs, following copies if necessary'''
1631 def filenode(ctx, path):
1634 def filenode(ctx, path):
1632 return ctx._manifest.get(path, nullid)
1635 return ctx._manifest.get(path, nullid)
1633
1636
1634 path = self._path
1637 path = self._path
1635 fl = self._filelog
1638 fl = self._filelog
1636 pcl = self._changectx._parents
1639 pcl = self._changectx._parents
1637 renamed = self.renamed()
1640 renamed = self.renamed()
1638
1641
1639 if renamed:
1642 if renamed:
1640 pl = [renamed + (None,)]
1643 pl = [renamed + (None,)]
1641 else:
1644 else:
1642 pl = [(path, filenode(pcl[0], path), fl)]
1645 pl = [(path, filenode(pcl[0], path), fl)]
1643
1646
1644 for pc in pcl[1:]:
1647 for pc in pcl[1:]:
1645 pl.append((path, filenode(pc, path), fl))
1648 pl.append((path, filenode(pc, path), fl))
1646
1649
1647 return [self._parentfilectx(p, fileid=n, filelog=l)
1650 return [self._parentfilectx(p, fileid=n, filelog=l)
1648 for p, n, l in pl if n != nullid]
1651 for p, n, l in pl if n != nullid]
1649
1652
1650 def children(self):
1653 def children(self):
1651 return []
1654 return []
1652
1655
1653 class workingfilectx(committablefilectx):
1656 class workingfilectx(committablefilectx):
1654 """A workingfilectx object makes access to data related to a particular
1657 """A workingfilectx object makes access to data related to a particular
1655 file in the working directory convenient."""
1658 file in the working directory convenient."""
1656 def __init__(self, repo, path, filelog=None, workingctx=None):
1659 def __init__(self, repo, path, filelog=None, workingctx=None):
1657 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1660 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1658
1661
1659 @propertycache
1662 @propertycache
1660 def _changectx(self):
1663 def _changectx(self):
1661 return workingctx(self._repo)
1664 return workingctx(self._repo)
1662
1665
1663 def data(self):
1666 def data(self):
1664 return self._repo.wread(self._path)
1667 return self._repo.wread(self._path)
1665 def renamed(self):
1668 def renamed(self):
1666 rp = self._repo.dirstate.copied(self._path)
1669 rp = self._repo.dirstate.copied(self._path)
1667 if not rp:
1670 if not rp:
1668 return None
1671 return None
1669 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1672 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1670
1673
1671 def size(self):
1674 def size(self):
1672 return self._repo.wvfs.lstat(self._path).st_size
1675 return self._repo.wvfs.lstat(self._path).st_size
1673 def date(self):
1676 def date(self):
1674 t, tz = self._changectx.date()
1677 t, tz = self._changectx.date()
1675 try:
1678 try:
1676 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1679 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1677 except OSError, err:
1680 except OSError, err:
1678 if err.errno != errno.ENOENT:
1681 if err.errno != errno.ENOENT:
1679 raise
1682 raise
1680 return (t, tz)
1683 return (t, tz)
1681
1684
1682 def cmp(self, fctx):
1685 def cmp(self, fctx):
1683 """compare with other file context
1686 """compare with other file context
1684
1687
1685 returns True if different from fctx.
1688 returns True if different from fctx.
1686 """
1689 """
1687 # fctx should be a filectx (not a workingfilectx)
1690 # fctx should be a filectx (not a workingfilectx)
1688 # invert comparison to reuse the same code path
1691 # invert comparison to reuse the same code path
1689 return fctx.cmp(self)
1692 return fctx.cmp(self)
1690
1693
1691 def remove(self, ignoremissing=False):
1694 def remove(self, ignoremissing=False):
1692 """wraps unlink for a repo's working directory"""
1695 """wraps unlink for a repo's working directory"""
1693 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1696 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1694
1697
1695 def write(self, data, flags):
1698 def write(self, data, flags):
1696 """wraps repo.wwrite"""
1699 """wraps repo.wwrite"""
1697 self._repo.wwrite(self._path, data, flags)
1700 self._repo.wwrite(self._path, data, flags)
1698
1701
1699 class workingcommitctx(workingctx):
1702 class workingcommitctx(workingctx):
1700 """A workingcommitctx object makes access to data related to
1703 """A workingcommitctx object makes access to data related to
1701 the revision being committed convenient.
1704 the revision being committed convenient.
1702
1705
1703 This hides changes in the working directory, if they aren't
1706 This hides changes in the working directory, if they aren't
1704 committed in this context.
1707 committed in this context.
1705 """
1708 """
1706 def __init__(self, repo, changes,
1709 def __init__(self, repo, changes,
1707 text="", user=None, date=None, extra=None):
1710 text="", user=None, date=None, extra=None):
1708 super(workingctx, self).__init__(repo, text, user, date, extra,
1711 super(workingctx, self).__init__(repo, text, user, date, extra,
1709 changes)
1712 changes)
1710
1713
1711 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1714 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1712 unknown=False):
1715 unknown=False):
1713 """Return matched files only in ``self._status``
1716 """Return matched files only in ``self._status``
1714
1717
1715 Uncommitted files appear "clean" via this context, even if
1718 Uncommitted files appear "clean" via this context, even if
1716 they aren't actually so in the working directory.
1719 they aren't actually so in the working directory.
1717 """
1720 """
1718 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1721 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1719 if clean:
1722 if clean:
1720 clean = [f for f in self._manifest if f not in self._changedset]
1723 clean = [f for f in self._manifest if f not in self._changedset]
1721 else:
1724 else:
1722 clean = []
1725 clean = []
1723 return scmutil.status([f for f in self._status.modified if match(f)],
1726 return scmutil.status([f for f in self._status.modified if match(f)],
1724 [f for f in self._status.added if match(f)],
1727 [f for f in self._status.added if match(f)],
1725 [f for f in self._status.removed if match(f)],
1728 [f for f in self._status.removed if match(f)],
1726 [], [], [], clean)
1729 [], [], [], clean)
1727
1730
1728 @propertycache
1731 @propertycache
1729 def _changedset(self):
1732 def _changedset(self):
1730 """Return the set of files changed in this context
1733 """Return the set of files changed in this context
1731 """
1734 """
1732 changed = set(self._status.modified)
1735 changed = set(self._status.modified)
1733 changed.update(self._status.added)
1736 changed.update(self._status.added)
1734 changed.update(self._status.removed)
1737 changed.update(self._status.removed)
1735 return changed
1738 return changed
1736
1739
1737 class memctx(committablectx):
1740 class memctx(committablectx):
1738 """Use memctx to perform in-memory commits via localrepo.commitctx().
1741 """Use memctx to perform in-memory commits via localrepo.commitctx().
1739
1742
1740 Revision information is supplied at initialization time while
1743 Revision information is supplied at initialization time while
1741 related files' data is made available through a callback
1744 related files' data is made available through a callback
1742 mechanism. 'repo' is the current localrepo, 'parents' is a
1745 mechanism. 'repo' is the current localrepo, 'parents' is a
1743 sequence of two parent revisions identifiers (pass None for every
1746 sequence of two parent revisions identifiers (pass None for every
1744 missing parent), 'text' is the commit message and 'files' lists
1747 missing parent), 'text' is the commit message and 'files' lists
1745 names of files touched by the revision (normalized and relative to
1748 names of files touched by the revision (normalized and relative to
1746 repository root).
1749 repository root).
1747
1750
1748 filectxfn(repo, memctx, path) is a callable receiving the
1751 filectxfn(repo, memctx, path) is a callable receiving the
1749 repository, the current memctx object and the normalized path of
1752 repository, the current memctx object and the normalized path of
1750 requested file, relative to repository root. It is fired by the
1753 requested file, relative to repository root. It is fired by the
1751 commit function for every file in 'files', but the call order is
1754 commit function for every file in 'files', but the call order is
1752 undefined. If the file is available in the revision being
1755 undefined. If the file is available in the revision being
1753 committed (updated or added), filectxfn returns a memfilectx
1756 committed (updated or added), filectxfn returns a memfilectx
1754 object. If the file was removed, filectxfn raises an
1757 object. If the file was removed, filectxfn raises an
1755 IOError. Moved files are represented by marking the source file
1758 IOError. Moved files are represented by marking the source file
1756 removed and the new file added with copy information (see
1759 removed and the new file added with copy information (see
1757 memfilectx).
1760 memfilectx).
1758
1761
1759 user receives the committer name and defaults to current
1762 user receives the committer name and defaults to current
1760 repository username, date is the commit date in any format
1763 repository username, date is the commit date in any format
1761 supported by util.parsedate() and defaults to current date, extra
1764 supported by util.parsedate() and defaults to current date, extra
1762 is a dictionary of metadata or is left empty.
1765 is a dictionary of metadata or is left empty.
1763 """
1766 """
1764
1767
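# Editorial usage sketch (assumes a writable repository at /tmp/repo; the path,
# file name and commit message are hypothetical): build a one-file changeset
# entirely in memory with memctx and commit it via localrepo.commitctx().
from mercurial import context, hg, ui as uimod
from mercurial.node import hex as nodehex

repo = hg.repository(uimod.ui(), '/tmp/repo')          # hypothetical path

def getfilectx(repo, mctx, path):
    return context.memfilectx(repo, path, 'hello from memctx\n')

mctx = context.memctx(repo, (repo['tip'].node(), None),
                      'in-memory commit (sketch)', ['greeting.txt'],
                      getfilectx, user='example <example@example.com>')
print(nodehex(repo.commitctx(mctx)))                   # node of the new changeset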
1765 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1768 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1766 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1769 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1767 # this field to determine what to do in filectxfn.
1770 # this field to determine what to do in filectxfn.
1768 _returnnoneformissingfiles = True
1771 _returnnoneformissingfiles = True
1769
1772
    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if store is not callable, wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # "util.cachefunc" reduces invocation of possibly expensive
            # "filectxfn" for performance (e.g. converting from another VCS)
            self._filectxfn = util.cachefunc(filectxfn)
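        # Illustrative sketch (assumption, not part of this module): the
        # non-callable branch above lets callers pass a plain mapping of path
        # to an existing filectx, e.g. to replay files from another revision:
        #
        #   srcctx = repo['.']
        #   filectxfn = dict((p, srcctx[p]) for p in files)
        #   ctx = memctx(repo, (srcctx.node(), None), 'replayed', files,
        #                filectxfn)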

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

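    # Illustrative sketch (assumption): a caller that wants the user's editor
    # opened for the message might construct the context as, e.g.:
    #
    #   editor = cmdutil.getcommiteditor(edit=True)
    #   ctx = memctx(repo, parents, text, files, filectxfn, editor=editor)
    #
    # The finished message is passed to savecommitmessage() above, so it can
    # be recovered from .hg/last-message.txt if the commit itself later fails.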
    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].node()
            if len(p) > 1:
                p2node = p[1].node()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

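    # For reference (an assumption about revlog internals, stated only as a
    # comment): revlog.hash(text, p1, p2) derives the file nodeid roughly as
    #
    #   s = sha1(min(p1, p2) + max(p1, p2))
    #   s.update(text)
    #   nodeid = s.digest()
    #
    # which is why the manifest entries above change whenever either the file
    # data or its recorded parents change.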
    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized as a two-element list above
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])

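    # Worked example (illustrative comment only): with parent manifests
    # containing 'a' and 'b', and files=['a', 'b', 'c'], the loop above yields
    #
    #   'c' -> added      (not in either parent manifest)
    #   'a' -> modified   (managed, and self['a'] returns a memfilectx)
    #   'b' -> removed    (managed, but filectxfn returned None for it)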
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if the current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            self._copied = (copied, nullid)

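    # Illustrative sketch (assumption, not part of this module): representing
    # a rename of 'old.txt' to 'new.txt' inside a filectxfn, as described in
    # the memctx docstring -- the source is reported removed, the destination
    # carries copy information pointing back at it:
    #
    #   def getfilectx(repo, memctx, path):
    #       if path == 'old.txt':
    #           return None                       # source marked removed
    #       return memfilectx(repo, path, data, copied='old.txt',
    #                         memctx=memctx)      # destination with copy info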
    def data(self):
        return self._data
    def size(self):
        return len(self.data())
    def flags(self):
        return self._flags
    def renamed(self):
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data