##// END OF EJS Templates
context: no longer accept diff options as dictionnary...
Boris Feld -
r38588:62249cfe default
parent child Browse files
Show More
@@ -1,2546 +1,2542
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 addednodeid,
17 addednodeid,
18 bin,
18 bin,
19 hex,
19 hex,
20 modifiednodeid,
20 modifiednodeid,
21 nullid,
21 nullid,
22 nullrev,
22 nullrev,
23 short,
23 short,
24 wdirfilenodeids,
24 wdirfilenodeids,
25 wdirid,
25 wdirid,
26 )
26 )
27 from . import (
27 from . import (
28 dagop,
28 dagop,
29 encoding,
29 encoding,
30 error,
30 error,
31 fileset,
31 fileset,
32 match as matchmod,
32 match as matchmod,
33 mdiff,
34 obsolete as obsmod,
33 obsolete as obsmod,
35 patch,
34 patch,
36 pathutil,
35 pathutil,
37 phases,
36 phases,
38 pycompat,
37 pycompat,
39 repoview,
38 repoview,
40 revlog,
39 revlog,
41 scmutil,
40 scmutil,
42 sparse,
41 sparse,
43 subrepo,
42 subrepo,
44 subrepoutil,
43 subrepoutil,
45 util,
44 util,
46 )
45 )
47 from .utils import (
46 from .utils import (
48 dateutil,
47 dateutil,
49 stringutil,
48 stringutil,
50 )
49 )
51
50
52 propertycache = util.propertycache
51 propertycache = util.propertycache
53
52
54 class basectx(object):
53 class basectx(object):
55 """A basectx object represents the common logic for its children:
54 """A basectx object represents the common logic for its children:
56 changectx: read-only context that is already present in the repo,
55 changectx: read-only context that is already present in the repo,
57 workingctx: a context that represents the working directory and can
56 workingctx: a context that represents the working directory and can
58 be committed,
57 be committed,
59 memctx: a context that represents changes in-memory and can also
58 memctx: a context that represents changes in-memory and can also
60 be committed."""
59 be committed."""
61
60
62 def __init__(self, repo):
61 def __init__(self, repo):
63 self._repo = repo
62 self._repo = repo
64
63
65 def __bytes__(self):
64 def __bytes__(self):
66 return short(self.node())
65 return short(self.node())
67
66
68 __str__ = encoding.strmethod(__bytes__)
67 __str__ = encoding.strmethod(__bytes__)
69
68
70 def __repr__(self):
69 def __repr__(self):
71 return r"<%s %s>" % (type(self).__name__, str(self))
70 return r"<%s %s>" % (type(self).__name__, str(self))
72
71
73 def __eq__(self, other):
72 def __eq__(self, other):
74 try:
73 try:
75 return type(self) == type(other) and self._rev == other._rev
74 return type(self) == type(other) and self._rev == other._rev
76 except AttributeError:
75 except AttributeError:
77 return False
76 return False
78
77
79 def __ne__(self, other):
78 def __ne__(self, other):
80 return not (self == other)
79 return not (self == other)
81
80
82 def __contains__(self, key):
81 def __contains__(self, key):
83 return key in self._manifest
82 return key in self._manifest
84
83
85 def __getitem__(self, key):
84 def __getitem__(self, key):
86 return self.filectx(key)
85 return self.filectx(key)
87
86
88 def __iter__(self):
87 def __iter__(self):
89 return iter(self._manifest)
88 return iter(self._manifest)
90
89
91 def _buildstatusmanifest(self, status):
90 def _buildstatusmanifest(self, status):
92 """Builds a manifest that includes the given status results, if this is
91 """Builds a manifest that includes the given status results, if this is
93 a working copy context. For non-working copy contexts, it just returns
92 a working copy context. For non-working copy contexts, it just returns
94 the normal manifest."""
93 the normal manifest."""
95 return self.manifest()
94 return self.manifest()
96
95
97 def _matchstatus(self, other, match):
96 def _matchstatus(self, other, match):
98 """This internal method provides a way for child objects to override the
97 """This internal method provides a way for child objects to override the
99 match operator.
98 match operator.
100 """
99 """
101 return match
100 return match
102
101
103 def _buildstatus(self, other, s, match, listignored, listclean,
102 def _buildstatus(self, other, s, match, listignored, listclean,
104 listunknown):
103 listunknown):
105 """build a status with respect to another context"""
104 """build a status with respect to another context"""
106 # Load earliest manifest first for caching reasons. More specifically,
105 # Load earliest manifest first for caching reasons. More specifically,
107 # if you have revisions 1000 and 1001, 1001 is probably stored as a
106 # if you have revisions 1000 and 1001, 1001 is probably stored as a
108 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
107 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
109 # 1000 and cache it so that when you read 1001, we just need to apply a
108 # 1000 and cache it so that when you read 1001, we just need to apply a
110 # delta to what's in the cache. So that's one full reconstruction + one
109 # delta to what's in the cache. So that's one full reconstruction + one
111 # delta application.
110 # delta application.
112 mf2 = None
111 mf2 = None
113 if self.rev() is not None and self.rev() < other.rev():
112 if self.rev() is not None and self.rev() < other.rev():
114 mf2 = self._buildstatusmanifest(s)
113 mf2 = self._buildstatusmanifest(s)
115 mf1 = other._buildstatusmanifest(s)
114 mf1 = other._buildstatusmanifest(s)
116 if mf2 is None:
115 if mf2 is None:
117 mf2 = self._buildstatusmanifest(s)
116 mf2 = self._buildstatusmanifest(s)
118
117
119 modified, added = [], []
118 modified, added = [], []
120 removed = []
119 removed = []
121 clean = []
120 clean = []
122 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
121 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
123 deletedset = set(deleted)
122 deletedset = set(deleted)
124 d = mf1.diff(mf2, match=match, clean=listclean)
123 d = mf1.diff(mf2, match=match, clean=listclean)
125 for fn, value in d.iteritems():
124 for fn, value in d.iteritems():
126 if fn in deletedset:
125 if fn in deletedset:
127 continue
126 continue
128 if value is None:
127 if value is None:
129 clean.append(fn)
128 clean.append(fn)
130 continue
129 continue
131 (node1, flag1), (node2, flag2) = value
130 (node1, flag1), (node2, flag2) = value
132 if node1 is None:
131 if node1 is None:
133 added.append(fn)
132 added.append(fn)
134 elif node2 is None:
133 elif node2 is None:
135 removed.append(fn)
134 removed.append(fn)
136 elif flag1 != flag2:
135 elif flag1 != flag2:
137 modified.append(fn)
136 modified.append(fn)
138 elif node2 not in wdirfilenodeids:
137 elif node2 not in wdirfilenodeids:
139 # When comparing files between two commits, we save time by
138 # When comparing files between two commits, we save time by
140 # not comparing the file contents when the nodeids differ.
139 # not comparing the file contents when the nodeids differ.
141 # Note that this means we incorrectly report a reverted change
140 # Note that this means we incorrectly report a reverted change
142 # to a file as a modification.
141 # to a file as a modification.
143 modified.append(fn)
142 modified.append(fn)
144 elif self[fn].cmp(other[fn]):
143 elif self[fn].cmp(other[fn]):
145 modified.append(fn)
144 modified.append(fn)
146 else:
145 else:
147 clean.append(fn)
146 clean.append(fn)
148
147
149 if removed:
148 if removed:
150 # need to filter files if they are already reported as removed
149 # need to filter files if they are already reported as removed
151 unknown = [fn for fn in unknown if fn not in mf1 and
150 unknown = [fn for fn in unknown if fn not in mf1 and
152 (not match or match(fn))]
151 (not match or match(fn))]
153 ignored = [fn for fn in ignored if fn not in mf1 and
152 ignored = [fn for fn in ignored if fn not in mf1 and
154 (not match or match(fn))]
153 (not match or match(fn))]
155 # if they're deleted, don't report them as removed
154 # if they're deleted, don't report them as removed
156 removed = [fn for fn in removed if fn not in deletedset]
155 removed = [fn for fn in removed if fn not in deletedset]
157
156
158 return scmutil.status(modified, added, removed, deleted, unknown,
157 return scmutil.status(modified, added, removed, deleted, unknown,
159 ignored, clean)
158 ignored, clean)
160
159
161 @propertycache
160 @propertycache
162 def substate(self):
161 def substate(self):
163 return subrepoutil.state(self, self._repo.ui)
162 return subrepoutil.state(self, self._repo.ui)
164
163
165 def subrev(self, subpath):
164 def subrev(self, subpath):
166 return self.substate[subpath][1]
165 return self.substate[subpath][1]
167
166
168 def rev(self):
167 def rev(self):
169 return self._rev
168 return self._rev
170 def node(self):
169 def node(self):
171 return self._node
170 return self._node
172 def hex(self):
171 def hex(self):
173 return hex(self.node())
172 return hex(self.node())
174 def manifest(self):
173 def manifest(self):
175 return self._manifest
174 return self._manifest
176 def manifestctx(self):
175 def manifestctx(self):
177 return self._manifestctx
176 return self._manifestctx
178 def repo(self):
177 def repo(self):
179 return self._repo
178 return self._repo
180 def phasestr(self):
179 def phasestr(self):
181 return phases.phasenames[self.phase()]
180 return phases.phasenames[self.phase()]
182 def mutable(self):
181 def mutable(self):
183 return self.phase() > phases.public
182 return self.phase() > phases.public
184
183
185 def getfileset(self, expr):
184 def getfileset(self, expr):
186 return fileset.getfileset(self, expr)
185 return fileset.getfileset(self, expr)
187
186
188 def obsolete(self):
187 def obsolete(self):
189 """True if the changeset is obsolete"""
188 """True if the changeset is obsolete"""
190 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
189 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
191
190
192 def extinct(self):
191 def extinct(self):
193 """True if the changeset is extinct"""
192 """True if the changeset is extinct"""
194 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
193 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
195
194
196 def orphan(self):
195 def orphan(self):
197 """True if the changeset is not obsolete but it's ancestor are"""
196 """True if the changeset is not obsolete but it's ancestor are"""
198 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
197 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
199
198
200 def phasedivergent(self):
199 def phasedivergent(self):
201 """True if the changeset try to be a successor of a public changeset
200 """True if the changeset try to be a successor of a public changeset
202
201
203 Only non-public and non-obsolete changesets may be bumped.
202 Only non-public and non-obsolete changesets may be bumped.
204 """
203 """
205 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
204 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
206
205
207 def contentdivergent(self):
206 def contentdivergent(self):
208 """Is a successors of a changeset with multiple possible successors set
207 """Is a successors of a changeset with multiple possible successors set
209
208
210 Only non-public and non-obsolete changesets may be divergent.
209 Only non-public and non-obsolete changesets may be divergent.
211 """
210 """
212 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
211 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
213
212
214 def isunstable(self):
213 def isunstable(self):
215 """True if the changeset is either unstable, bumped or divergent"""
214 """True if the changeset is either unstable, bumped or divergent"""
216 return self.orphan() or self.phasedivergent() or self.contentdivergent()
215 return self.orphan() or self.phasedivergent() or self.contentdivergent()
217
216
218 def instabilities(self):
217 def instabilities(self):
219 """return the list of instabilities affecting this changeset.
218 """return the list of instabilities affecting this changeset.
220
219
221 Instabilities are returned as strings. possible values are:
220 Instabilities are returned as strings. possible values are:
222 - orphan,
221 - orphan,
223 - phase-divergent,
222 - phase-divergent,
224 - content-divergent.
223 - content-divergent.
225 """
224 """
226 instabilities = []
225 instabilities = []
227 if self.orphan():
226 if self.orphan():
228 instabilities.append('orphan')
227 instabilities.append('orphan')
229 if self.phasedivergent():
228 if self.phasedivergent():
230 instabilities.append('phase-divergent')
229 instabilities.append('phase-divergent')
231 if self.contentdivergent():
230 if self.contentdivergent():
232 instabilities.append('content-divergent')
231 instabilities.append('content-divergent')
233 return instabilities
232 return instabilities
234
233
235 def parents(self):
234 def parents(self):
236 """return contexts for each parent changeset"""
235 """return contexts for each parent changeset"""
237 return self._parents
236 return self._parents
238
237
239 def p1(self):
238 def p1(self):
240 return self._parents[0]
239 return self._parents[0]
241
240
242 def p2(self):
241 def p2(self):
243 parents = self._parents
242 parents = self._parents
244 if len(parents) == 2:
243 if len(parents) == 2:
245 return parents[1]
244 return parents[1]
246 return changectx(self._repo, nullrev)
245 return changectx(self._repo, nullrev)
247
246
248 def _fileinfo(self, path):
247 def _fileinfo(self, path):
249 if r'_manifest' in self.__dict__:
248 if r'_manifest' in self.__dict__:
250 try:
249 try:
251 return self._manifest[path], self._manifest.flags(path)
250 return self._manifest[path], self._manifest.flags(path)
252 except KeyError:
251 except KeyError:
253 raise error.ManifestLookupError(self._node, path,
252 raise error.ManifestLookupError(self._node, path,
254 _('not found in manifest'))
253 _('not found in manifest'))
255 if r'_manifestdelta' in self.__dict__ or path in self.files():
254 if r'_manifestdelta' in self.__dict__ or path in self.files():
256 if path in self._manifestdelta:
255 if path in self._manifestdelta:
257 return (self._manifestdelta[path],
256 return (self._manifestdelta[path],
258 self._manifestdelta.flags(path))
257 self._manifestdelta.flags(path))
259 mfl = self._repo.manifestlog
258 mfl = self._repo.manifestlog
260 try:
259 try:
261 node, flag = mfl[self._changeset.manifest].find(path)
260 node, flag = mfl[self._changeset.manifest].find(path)
262 except KeyError:
261 except KeyError:
263 raise error.ManifestLookupError(self._node, path,
262 raise error.ManifestLookupError(self._node, path,
264 _('not found in manifest'))
263 _('not found in manifest'))
265
264
266 return node, flag
265 return node, flag
267
266
268 def filenode(self, path):
267 def filenode(self, path):
269 return self._fileinfo(path)[0]
268 return self._fileinfo(path)[0]
270
269
271 def flags(self, path):
270 def flags(self, path):
272 try:
271 try:
273 return self._fileinfo(path)[1]
272 return self._fileinfo(path)[1]
274 except error.LookupError:
273 except error.LookupError:
275 return ''
274 return ''
276
275
277 def sub(self, path, allowcreate=True):
276 def sub(self, path, allowcreate=True):
278 '''return a subrepo for the stored revision of path, never wdir()'''
277 '''return a subrepo for the stored revision of path, never wdir()'''
279 return subrepo.subrepo(self, path, allowcreate=allowcreate)
278 return subrepo.subrepo(self, path, allowcreate=allowcreate)
280
279
281 def nullsub(self, path, pctx):
280 def nullsub(self, path, pctx):
282 return subrepo.nullsubrepo(self, path, pctx)
281 return subrepo.nullsubrepo(self, path, pctx)
283
282
284 def workingsub(self, path):
283 def workingsub(self, path):
285 '''return a subrepo for the stored revision, or wdir if this is a wdir
284 '''return a subrepo for the stored revision, or wdir if this is a wdir
286 context.
285 context.
287 '''
286 '''
288 return subrepo.subrepo(self, path, allowwdir=True)
287 return subrepo.subrepo(self, path, allowwdir=True)
289
288
290 def match(self, pats=None, include=None, exclude=None, default='glob',
289 def match(self, pats=None, include=None, exclude=None, default='glob',
291 listsubrepos=False, badfn=None):
290 listsubrepos=False, badfn=None):
292 r = self._repo
291 r = self._repo
293 return matchmod.match(r.root, r.getcwd(), pats,
292 return matchmod.match(r.root, r.getcwd(), pats,
294 include, exclude, default,
293 include, exclude, default,
295 auditor=r.nofsauditor, ctx=self,
294 auditor=r.nofsauditor, ctx=self,
296 listsubrepos=listsubrepos, badfn=badfn)
295 listsubrepos=listsubrepos, badfn=badfn)
297
296
298 def diff(self, ctx2=None, match=None, changes=None, opts=None,
297 def diff(self, ctx2=None, match=None, changes=None, opts=None,
299 losedatafn=None, prefix='', relroot='', copy=None,
298 losedatafn=None, prefix='', relroot='', copy=None,
300 hunksfilterfn=None):
299 hunksfilterfn=None):
301 """Returns a diff generator for the given contexts and matcher"""
300 """Returns a diff generator for the given contexts and matcher"""
302 if ctx2 is None:
301 if ctx2 is None:
303 ctx2 = self.p1()
302 ctx2 = self.p1()
304 if ctx2 is not None:
303 if ctx2 is not None:
305 ctx2 = self._repo[ctx2]
304 ctx2 = self._repo[ctx2]
306
305
307 if isinstance(opts, mdiff.diffopts):
306 diffopts = opts
308 diffopts = opts
309 else:
310 diffopts = patch.diffopts(self._repo.ui, opts)
311 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
307 return patch.diff(self._repo, ctx2, self, match=match, changes=changes,
312 opts=diffopts, losedatafn=losedatafn, prefix=prefix,
308 opts=diffopts, losedatafn=losedatafn, prefix=prefix,
313 relroot=relroot, copy=copy,
309 relroot=relroot, copy=copy,
314 hunksfilterfn=hunksfilterfn)
310 hunksfilterfn=hunksfilterfn)
315
311
316 def dirs(self):
312 def dirs(self):
317 return self._manifest.dirs()
313 return self._manifest.dirs()
318
314
319 def hasdir(self, dir):
315 def hasdir(self, dir):
320 return self._manifest.hasdir(dir)
316 return self._manifest.hasdir(dir)
321
317
322 def status(self, other=None, match=None, listignored=False,
318 def status(self, other=None, match=None, listignored=False,
323 listclean=False, listunknown=False, listsubrepos=False):
319 listclean=False, listunknown=False, listsubrepos=False):
324 """return status of files between two nodes or node and working
320 """return status of files between two nodes or node and working
325 directory.
321 directory.
326
322
327 If other is None, compare this node with working directory.
323 If other is None, compare this node with working directory.
328
324
329 returns (modified, added, removed, deleted, unknown, ignored, clean)
325 returns (modified, added, removed, deleted, unknown, ignored, clean)
330 """
326 """
331
327
332 ctx1 = self
328 ctx1 = self
333 ctx2 = self._repo[other]
329 ctx2 = self._repo[other]
334
330
335 # This next code block is, admittedly, fragile logic that tests for
331 # This next code block is, admittedly, fragile logic that tests for
336 # reversing the contexts and wouldn't need to exist if it weren't for
332 # reversing the contexts and wouldn't need to exist if it weren't for
337 # the fast (and common) code path of comparing the working directory
333 # the fast (and common) code path of comparing the working directory
338 # with its first parent.
334 # with its first parent.
339 #
335 #
340 # What we're aiming for here is the ability to call:
336 # What we're aiming for here is the ability to call:
341 #
337 #
342 # workingctx.status(parentctx)
338 # workingctx.status(parentctx)
343 #
339 #
344 # If we always built the manifest for each context and compared those,
340 # If we always built the manifest for each context and compared those,
345 # then we'd be done. But the special case of the above call means we
341 # then we'd be done. But the special case of the above call means we
346 # just copy the manifest of the parent.
342 # just copy the manifest of the parent.
347 reversed = False
343 reversed = False
348 if (not isinstance(ctx1, changectx)
344 if (not isinstance(ctx1, changectx)
349 and isinstance(ctx2, changectx)):
345 and isinstance(ctx2, changectx)):
350 reversed = True
346 reversed = True
351 ctx1, ctx2 = ctx2, ctx1
347 ctx1, ctx2 = ctx2, ctx1
352
348
353 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
349 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
354 match = ctx2._matchstatus(ctx1, match)
350 match = ctx2._matchstatus(ctx1, match)
355 r = scmutil.status([], [], [], [], [], [], [])
351 r = scmutil.status([], [], [], [], [], [], [])
356 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
352 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
357 listunknown)
353 listunknown)
358
354
359 if reversed:
355 if reversed:
360 # Reverse added and removed. Clear deleted, unknown and ignored as
356 # Reverse added and removed. Clear deleted, unknown and ignored as
361 # these make no sense to reverse.
357 # these make no sense to reverse.
362 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
358 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
363 r.clean)
359 r.clean)
364
360
365 if listsubrepos:
361 if listsubrepos:
366 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
362 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
367 try:
363 try:
368 rev2 = ctx2.subrev(subpath)
364 rev2 = ctx2.subrev(subpath)
369 except KeyError:
365 except KeyError:
370 # A subrepo that existed in node1 was deleted between
366 # A subrepo that existed in node1 was deleted between
371 # node1 and node2 (inclusive). Thus, ctx2's substate
367 # node1 and node2 (inclusive). Thus, ctx2's substate
372 # won't contain that subpath. The best we can do ignore it.
368 # won't contain that subpath. The best we can do ignore it.
373 rev2 = None
369 rev2 = None
374 submatch = matchmod.subdirmatcher(subpath, match)
370 submatch = matchmod.subdirmatcher(subpath, match)
375 s = sub.status(rev2, match=submatch, ignored=listignored,
371 s = sub.status(rev2, match=submatch, ignored=listignored,
376 clean=listclean, unknown=listunknown,
372 clean=listclean, unknown=listunknown,
377 listsubrepos=True)
373 listsubrepos=True)
378 for rfiles, sfiles in zip(r, s):
374 for rfiles, sfiles in zip(r, s):
379 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
375 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
380
376
381 for l in r:
377 for l in r:
382 l.sort()
378 l.sort()
383
379
384 return r
380 return r
385
381
386 class changectx(basectx):
382 class changectx(basectx):
387 """A changecontext object makes access to data related to a particular
383 """A changecontext object makes access to data related to a particular
388 changeset convenient. It represents a read-only context already present in
384 changeset convenient. It represents a read-only context already present in
389 the repo."""
385 the repo."""
390 def __init__(self, repo, changeid='.'):
386 def __init__(self, repo, changeid='.'):
391 """changeid is a revision number, node, or tag"""
387 """changeid is a revision number, node, or tag"""
392 super(changectx, self).__init__(repo)
388 super(changectx, self).__init__(repo)
393
389
394 try:
390 try:
395 if isinstance(changeid, int):
391 if isinstance(changeid, int):
396 self._node = repo.changelog.node(changeid)
392 self._node = repo.changelog.node(changeid)
397 self._rev = changeid
393 self._rev = changeid
398 return
394 return
399 elif changeid == 'null':
395 elif changeid == 'null':
400 self._node = nullid
396 self._node = nullid
401 self._rev = nullrev
397 self._rev = nullrev
402 return
398 return
403 elif changeid == 'tip':
399 elif changeid == 'tip':
404 self._node = repo.changelog.tip()
400 self._node = repo.changelog.tip()
405 self._rev = repo.changelog.rev(self._node)
401 self._rev = repo.changelog.rev(self._node)
406 return
402 return
407 elif (changeid == '.'
403 elif (changeid == '.'
408 or repo.local() and changeid == repo.dirstate.p1()):
404 or repo.local() and changeid == repo.dirstate.p1()):
409 # this is a hack to delay/avoid loading obsmarkers
405 # this is a hack to delay/avoid loading obsmarkers
410 # when we know that '.' won't be hidden
406 # when we know that '.' won't be hidden
411 self._node = repo.dirstate.p1()
407 self._node = repo.dirstate.p1()
412 self._rev = repo.unfiltered().changelog.rev(self._node)
408 self._rev = repo.unfiltered().changelog.rev(self._node)
413 return
409 return
414 elif len(changeid) == 20:
410 elif len(changeid) == 20:
415 try:
411 try:
416 self._node = changeid
412 self._node = changeid
417 self._rev = repo.changelog.rev(changeid)
413 self._rev = repo.changelog.rev(changeid)
418 return
414 return
419 except error.FilteredLookupError:
415 except error.FilteredLookupError:
420 raise
416 raise
421 except LookupError:
417 except LookupError:
422 # check if it might have come from damaged dirstate
418 # check if it might have come from damaged dirstate
423 #
419 #
424 # XXX we could avoid the unfiltered if we had a recognizable
420 # XXX we could avoid the unfiltered if we had a recognizable
425 # exception for filtered changeset access
421 # exception for filtered changeset access
426 if (repo.local()
422 if (repo.local()
427 and changeid in repo.unfiltered().dirstate.parents()):
423 and changeid in repo.unfiltered().dirstate.parents()):
428 msg = _("working directory has unknown parent '%s'!")
424 msg = _("working directory has unknown parent '%s'!")
429 raise error.Abort(msg % short(changeid))
425 raise error.Abort(msg % short(changeid))
430 changeid = hex(changeid) # for the error message
426 changeid = hex(changeid) # for the error message
431
427
432 elif len(changeid) == 40:
428 elif len(changeid) == 40:
433 try:
429 try:
434 self._node = bin(changeid)
430 self._node = bin(changeid)
435 self._rev = repo.changelog.rev(self._node)
431 self._rev = repo.changelog.rev(self._node)
436 return
432 return
437 except error.FilteredLookupError:
433 except error.FilteredLookupError:
438 raise
434 raise
439 except (TypeError, LookupError):
435 except (TypeError, LookupError):
440 pass
436 pass
441
437
442 # lookup failed
438 # lookup failed
443 except (error.FilteredIndexError, error.FilteredLookupError):
439 except (error.FilteredIndexError, error.FilteredLookupError):
444 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
440 raise error.FilteredRepoLookupError(_("filtered revision '%s'")
445 % pycompat.bytestr(changeid))
441 % pycompat.bytestr(changeid))
446 except error.FilteredRepoLookupError:
442 except error.FilteredRepoLookupError:
447 raise
443 raise
448 except IndexError:
444 except IndexError:
449 pass
445 pass
450 raise error.RepoLookupError(
446 raise error.RepoLookupError(
451 _("unknown revision '%s'") % changeid)
447 _("unknown revision '%s'") % changeid)
452
448
453 def __hash__(self):
449 def __hash__(self):
454 try:
450 try:
455 return hash(self._rev)
451 return hash(self._rev)
456 except AttributeError:
452 except AttributeError:
457 return id(self)
453 return id(self)
458
454
459 def __nonzero__(self):
455 def __nonzero__(self):
460 return self._rev != nullrev
456 return self._rev != nullrev
461
457
462 __bool__ = __nonzero__
458 __bool__ = __nonzero__
463
459
464 @propertycache
460 @propertycache
465 def _changeset(self):
461 def _changeset(self):
466 return self._repo.changelog.changelogrevision(self.rev())
462 return self._repo.changelog.changelogrevision(self.rev())
467
463
468 @propertycache
464 @propertycache
469 def _manifest(self):
465 def _manifest(self):
470 return self._manifestctx.read()
466 return self._manifestctx.read()
471
467
472 @property
468 @property
473 def _manifestctx(self):
469 def _manifestctx(self):
474 return self._repo.manifestlog[self._changeset.manifest]
470 return self._repo.manifestlog[self._changeset.manifest]
475
471
476 @propertycache
472 @propertycache
477 def _manifestdelta(self):
473 def _manifestdelta(self):
478 return self._manifestctx.readdelta()
474 return self._manifestctx.readdelta()
479
475
480 @propertycache
476 @propertycache
481 def _parents(self):
477 def _parents(self):
482 repo = self._repo
478 repo = self._repo
483 p1, p2 = repo.changelog.parentrevs(self._rev)
479 p1, p2 = repo.changelog.parentrevs(self._rev)
484 if p2 == nullrev:
480 if p2 == nullrev:
485 return [changectx(repo, p1)]
481 return [changectx(repo, p1)]
486 return [changectx(repo, p1), changectx(repo, p2)]
482 return [changectx(repo, p1), changectx(repo, p2)]
487
483
488 def changeset(self):
484 def changeset(self):
489 c = self._changeset
485 c = self._changeset
490 return (
486 return (
491 c.manifest,
487 c.manifest,
492 c.user,
488 c.user,
493 c.date,
489 c.date,
494 c.files,
490 c.files,
495 c.description,
491 c.description,
496 c.extra,
492 c.extra,
497 )
493 )
498 def manifestnode(self):
494 def manifestnode(self):
499 return self._changeset.manifest
495 return self._changeset.manifest
500
496
501 def user(self):
497 def user(self):
502 return self._changeset.user
498 return self._changeset.user
503 def date(self):
499 def date(self):
504 return self._changeset.date
500 return self._changeset.date
505 def files(self):
501 def files(self):
506 return self._changeset.files
502 return self._changeset.files
507 def description(self):
503 def description(self):
508 return self._changeset.description
504 return self._changeset.description
509 def branch(self):
505 def branch(self):
510 return encoding.tolocal(self._changeset.extra.get("branch"))
506 return encoding.tolocal(self._changeset.extra.get("branch"))
511 def closesbranch(self):
507 def closesbranch(self):
512 return 'close' in self._changeset.extra
508 return 'close' in self._changeset.extra
513 def extra(self):
509 def extra(self):
514 """Return a dict of extra information."""
510 """Return a dict of extra information."""
515 return self._changeset.extra
511 return self._changeset.extra
516 def tags(self):
512 def tags(self):
517 """Return a list of byte tag names"""
513 """Return a list of byte tag names"""
518 return self._repo.nodetags(self._node)
514 return self._repo.nodetags(self._node)
519 def bookmarks(self):
515 def bookmarks(self):
520 """Return a list of byte bookmark names."""
516 """Return a list of byte bookmark names."""
521 return self._repo.nodebookmarks(self._node)
517 return self._repo.nodebookmarks(self._node)
522 def phase(self):
518 def phase(self):
523 return self._repo._phasecache.phase(self._repo, self._rev)
519 return self._repo._phasecache.phase(self._repo, self._rev)
524 def hidden(self):
520 def hidden(self):
525 return self._rev in repoview.filterrevs(self._repo, 'visible')
521 return self._rev in repoview.filterrevs(self._repo, 'visible')
526
522
527 def isinmemory(self):
523 def isinmemory(self):
528 return False
524 return False
529
525
530 def children(self):
526 def children(self):
531 """return list of changectx contexts for each child changeset.
527 """return list of changectx contexts for each child changeset.
532
528
533 This returns only the immediate child changesets. Use descendants() to
529 This returns only the immediate child changesets. Use descendants() to
534 recursively walk children.
530 recursively walk children.
535 """
531 """
536 c = self._repo.changelog.children(self._node)
532 c = self._repo.changelog.children(self._node)
537 return [changectx(self._repo, x) for x in c]
533 return [changectx(self._repo, x) for x in c]
538
534
539 def ancestors(self):
535 def ancestors(self):
540 for a in self._repo.changelog.ancestors([self._rev]):
536 for a in self._repo.changelog.ancestors([self._rev]):
541 yield changectx(self._repo, a)
537 yield changectx(self._repo, a)
542
538
543 def descendants(self):
539 def descendants(self):
544 """Recursively yield all children of the changeset.
540 """Recursively yield all children of the changeset.
545
541
546 For just the immediate children, use children()
542 For just the immediate children, use children()
547 """
543 """
548 for d in self._repo.changelog.descendants([self._rev]):
544 for d in self._repo.changelog.descendants([self._rev]):
549 yield changectx(self._repo, d)
545 yield changectx(self._repo, d)
550
546
551 def filectx(self, path, fileid=None, filelog=None):
547 def filectx(self, path, fileid=None, filelog=None):
552 """get a file context from this changeset"""
548 """get a file context from this changeset"""
553 if fileid is None:
549 if fileid is None:
554 fileid = self.filenode(path)
550 fileid = self.filenode(path)
555 return filectx(self._repo, path, fileid=fileid,
551 return filectx(self._repo, path, fileid=fileid,
556 changectx=self, filelog=filelog)
552 changectx=self, filelog=filelog)
557
553
558 def ancestor(self, c2, warn=False):
554 def ancestor(self, c2, warn=False):
559 """return the "best" ancestor context of self and c2
555 """return the "best" ancestor context of self and c2
560
556
561 If there are multiple candidates, it will show a message and check
557 If there are multiple candidates, it will show a message and check
562 merge.preferancestor configuration before falling back to the
558 merge.preferancestor configuration before falling back to the
563 revlog ancestor."""
559 revlog ancestor."""
564 # deal with workingctxs
560 # deal with workingctxs
565 n2 = c2._node
561 n2 = c2._node
566 if n2 is None:
562 if n2 is None:
567 n2 = c2._parents[0]._node
563 n2 = c2._parents[0]._node
568 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
564 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
569 if not cahs:
565 if not cahs:
570 anc = nullid
566 anc = nullid
571 elif len(cahs) == 1:
567 elif len(cahs) == 1:
572 anc = cahs[0]
568 anc = cahs[0]
573 else:
569 else:
574 # experimental config: merge.preferancestor
570 # experimental config: merge.preferancestor
575 for r in self._repo.ui.configlist('merge', 'preferancestor'):
571 for r in self._repo.ui.configlist('merge', 'preferancestor'):
576 try:
572 try:
577 ctx = scmutil.revsymbol(self._repo, r)
573 ctx = scmutil.revsymbol(self._repo, r)
578 except error.RepoLookupError:
574 except error.RepoLookupError:
579 continue
575 continue
580 anc = ctx.node()
576 anc = ctx.node()
581 if anc in cahs:
577 if anc in cahs:
582 break
578 break
583 else:
579 else:
584 anc = self._repo.changelog.ancestor(self._node, n2)
580 anc = self._repo.changelog.ancestor(self._node, n2)
585 if warn:
581 if warn:
586 self._repo.ui.status(
582 self._repo.ui.status(
587 (_("note: using %s as ancestor of %s and %s\n") %
583 (_("note: using %s as ancestor of %s and %s\n") %
588 (short(anc), short(self._node), short(n2))) +
584 (short(anc), short(self._node), short(n2))) +
589 ''.join(_(" alternatively, use --config "
585 ''.join(_(" alternatively, use --config "
590 "merge.preferancestor=%s\n") %
586 "merge.preferancestor=%s\n") %
591 short(n) for n in sorted(cahs) if n != anc))
587 short(n) for n in sorted(cahs) if n != anc))
592 return changectx(self._repo, anc)
588 return changectx(self._repo, anc)
593
589
594 def descendant(self, other):
590 def descendant(self, other):
595 """True if other is descendant of this changeset"""
591 """True if other is descendant of this changeset"""
596 return self._repo.changelog.descendant(self._rev, other._rev)
592 return self._repo.changelog.descendant(self._rev, other._rev)
597
593
598 def walk(self, match):
594 def walk(self, match):
599 '''Generates matching file names.'''
595 '''Generates matching file names.'''
600
596
601 # Wrap match.bad method to have message with nodeid
597 # Wrap match.bad method to have message with nodeid
602 def bad(fn, msg):
598 def bad(fn, msg):
603 # The manifest doesn't know about subrepos, so don't complain about
599 # The manifest doesn't know about subrepos, so don't complain about
604 # paths into valid subrepos.
600 # paths into valid subrepos.
605 if any(fn == s or fn.startswith(s + '/')
601 if any(fn == s or fn.startswith(s + '/')
606 for s in self.substate):
602 for s in self.substate):
607 return
603 return
608 match.bad(fn, _('no such file in rev %s') % self)
604 match.bad(fn, _('no such file in rev %s') % self)
609
605
610 m = matchmod.badmatch(match, bad)
606 m = matchmod.badmatch(match, bad)
611 return self._manifest.walk(m)
607 return self._manifest.walk(m)
612
608
613 def matches(self, match):
609 def matches(self, match):
614 return self.walk(match)
610 return self.walk(match)
615
611
616 class basefilectx(object):
612 class basefilectx(object):
617 """A filecontext object represents the common logic for its children:
613 """A filecontext object represents the common logic for its children:
618 filectx: read-only access to a filerevision that is already present
614 filectx: read-only access to a filerevision that is already present
619 in the repo,
615 in the repo,
620 workingfilectx: a filecontext that represents files from the working
616 workingfilectx: a filecontext that represents files from the working
621 directory,
617 directory,
622 memfilectx: a filecontext that represents files in-memory,
618 memfilectx: a filecontext that represents files in-memory,
623 overlayfilectx: duplicate another filecontext with some fields overridden.
619 overlayfilectx: duplicate another filecontext with some fields overridden.
624 """
620 """
625 @propertycache
621 @propertycache
626 def _filelog(self):
622 def _filelog(self):
627 return self._repo.file(self._path)
623 return self._repo.file(self._path)
628
624
629 @propertycache
625 @propertycache
630 def _changeid(self):
626 def _changeid(self):
631 if r'_changeid' in self.__dict__:
627 if r'_changeid' in self.__dict__:
632 return self._changeid
628 return self._changeid
633 elif r'_changectx' in self.__dict__:
629 elif r'_changectx' in self.__dict__:
634 return self._changectx.rev()
630 return self._changectx.rev()
635 elif r'_descendantrev' in self.__dict__:
631 elif r'_descendantrev' in self.__dict__:
636 # this file context was created from a revision with a known
632 # this file context was created from a revision with a known
637 # descendant, we can (lazily) correct for linkrev aliases
633 # descendant, we can (lazily) correct for linkrev aliases
638 return self._adjustlinkrev(self._descendantrev)
634 return self._adjustlinkrev(self._descendantrev)
639 else:
635 else:
640 return self._filelog.linkrev(self._filerev)
636 return self._filelog.linkrev(self._filerev)
641
637
642 @propertycache
638 @propertycache
643 def _filenode(self):
639 def _filenode(self):
644 if r'_fileid' in self.__dict__:
640 if r'_fileid' in self.__dict__:
645 return self._filelog.lookup(self._fileid)
641 return self._filelog.lookup(self._fileid)
646 else:
642 else:
647 return self._changectx.filenode(self._path)
643 return self._changectx.filenode(self._path)
648
644
649 @propertycache
645 @propertycache
650 def _filerev(self):
646 def _filerev(self):
651 return self._filelog.rev(self._filenode)
647 return self._filelog.rev(self._filenode)
652
648
653 @propertycache
649 @propertycache
654 def _repopath(self):
650 def _repopath(self):
655 return self._path
651 return self._path
656
652
657 def __nonzero__(self):
653 def __nonzero__(self):
658 try:
654 try:
659 self._filenode
655 self._filenode
660 return True
656 return True
661 except error.LookupError:
657 except error.LookupError:
662 # file is missing
658 # file is missing
663 return False
659 return False
664
660
665 __bool__ = __nonzero__
661 __bool__ = __nonzero__
666
662
667 def __bytes__(self):
663 def __bytes__(self):
668 try:
664 try:
669 return "%s@%s" % (self.path(), self._changectx)
665 return "%s@%s" % (self.path(), self._changectx)
670 except error.LookupError:
666 except error.LookupError:
671 return "%s@???" % self.path()
667 return "%s@???" % self.path()
672
668
673 __str__ = encoding.strmethod(__bytes__)
669 __str__ = encoding.strmethod(__bytes__)
674
670
675 def __repr__(self):
671 def __repr__(self):
676 return r"<%s %s>" % (type(self).__name__, str(self))
672 return r"<%s %s>" % (type(self).__name__, str(self))
677
673
678 def __hash__(self):
674 def __hash__(self):
679 try:
675 try:
680 return hash((self._path, self._filenode))
676 return hash((self._path, self._filenode))
681 except AttributeError:
677 except AttributeError:
682 return id(self)
678 return id(self)
683
679
684 def __eq__(self, other):
680 def __eq__(self, other):
685 try:
681 try:
686 return (type(self) == type(other) and self._path == other._path
682 return (type(self) == type(other) and self._path == other._path
687 and self._filenode == other._filenode)
683 and self._filenode == other._filenode)
688 except AttributeError:
684 except AttributeError:
689 return False
685 return False
690
686
691 def __ne__(self, other):
687 def __ne__(self, other):
692 return not (self == other)
688 return not (self == other)
693
689
694 def filerev(self):
690 def filerev(self):
695 return self._filerev
691 return self._filerev
696 def filenode(self):
692 def filenode(self):
697 return self._filenode
693 return self._filenode
698 @propertycache
694 @propertycache
699 def _flags(self):
695 def _flags(self):
700 return self._changectx.flags(self._path)
696 return self._changectx.flags(self._path)
701 def flags(self):
697 def flags(self):
702 return self._flags
698 return self._flags
703 def filelog(self):
699 def filelog(self):
704 return self._filelog
700 return self._filelog
705 def rev(self):
701 def rev(self):
706 return self._changeid
702 return self._changeid
707 def linkrev(self):
703 def linkrev(self):
708 return self._filelog.linkrev(self._filerev)
704 return self._filelog.linkrev(self._filerev)
709 def node(self):
705 def node(self):
710 return self._changectx.node()
706 return self._changectx.node()
711 def hex(self):
707 def hex(self):
712 return self._changectx.hex()
708 return self._changectx.hex()
713 def user(self):
709 def user(self):
714 return self._changectx.user()
710 return self._changectx.user()
715 def date(self):
711 def date(self):
716 return self._changectx.date()
712 return self._changectx.date()
717 def files(self):
713 def files(self):
718 return self._changectx.files()
714 return self._changectx.files()
719 def description(self):
715 def description(self):
720 return self._changectx.description()
716 return self._changectx.description()
721 def branch(self):
717 def branch(self):
722 return self._changectx.branch()
718 return self._changectx.branch()
723 def extra(self):
719 def extra(self):
724 return self._changectx.extra()
720 return self._changectx.extra()
725 def phase(self):
721 def phase(self):
726 return self._changectx.phase()
722 return self._changectx.phase()
727 def phasestr(self):
723 def phasestr(self):
728 return self._changectx.phasestr()
724 return self._changectx.phasestr()
729 def obsolete(self):
725 def obsolete(self):
730 return self._changectx.obsolete()
726 return self._changectx.obsolete()
731 def instabilities(self):
727 def instabilities(self):
732 return self._changectx.instabilities()
728 return self._changectx.instabilities()
733 def manifest(self):
729 def manifest(self):
734 return self._changectx.manifest()
730 return self._changectx.manifest()
735 def changectx(self):
731 def changectx(self):
736 return self._changectx
732 return self._changectx
737 def renamed(self):
733 def renamed(self):
738 return self._copied
734 return self._copied
739 def repo(self):
735 def repo(self):
740 return self._repo
736 return self._repo
741 def size(self):
737 def size(self):
742 return len(self.data())
738 return len(self.data())
743
739
744 def path(self):
740 def path(self):
745 return self._path
741 return self._path
746
742
747 def isbinary(self):
743 def isbinary(self):
748 try:
744 try:
749 return stringutil.binary(self.data())
745 return stringutil.binary(self.data())
750 except IOError:
746 except IOError:
751 return False
747 return False
752 def isexec(self):
748 def isexec(self):
753 return 'x' in self.flags()
749 return 'x' in self.flags()
754 def islink(self):
750 def islink(self):
755 return 'l' in self.flags()
751 return 'l' in self.flags()
756
752
757 def isabsent(self):
753 def isabsent(self):
758 """whether this filectx represents a file not in self._changectx
754 """whether this filectx represents a file not in self._changectx
759
755
760 This is mainly for merge code to detect change/delete conflicts. This is
756 This is mainly for merge code to detect change/delete conflicts. This is
761 expected to be True for all subclasses of basectx."""
757 expected to be True for all subclasses of basectx."""
762 return False
758 return False
763
759
764 _customcmp = False
760 _customcmp = False
765 def cmp(self, fctx):
761 def cmp(self, fctx):
766 """compare with other file context
762 """compare with other file context
767
763
768 returns True if different than fctx.
764 returns True if different than fctx.
769 """
765 """
770 if fctx._customcmp:
766 if fctx._customcmp:
771 return fctx.cmp(self)
767 return fctx.cmp(self)
772
768
773 if (fctx._filenode is None
769 if (fctx._filenode is None
774 and (self._repo._encodefilterpats
770 and (self._repo._encodefilterpats
775 # if file data starts with '\1\n', empty metadata block is
771 # if file data starts with '\1\n', empty metadata block is
776 # prepended, which adds 4 bytes to filelog.size().
772 # prepended, which adds 4 bytes to filelog.size().
777 or self.size() - 4 == fctx.size())
773 or self.size() - 4 == fctx.size())
778 or self.size() == fctx.size()):
774 or self.size() == fctx.size()):
779 return self._filelog.cmp(self._filenode, fctx.data())
775 return self._filelog.cmp(self._filenode, fctx.data())
780
776
781 return True
777 return True
782
778
783 def _adjustlinkrev(self, srcrev, inclusive=False):
779 def _adjustlinkrev(self, srcrev, inclusive=False):
784 """return the first ancestor of <srcrev> introducing <fnode>
780 """return the first ancestor of <srcrev> introducing <fnode>
785
781
786 If the linkrev of the file revision does not point to an ancestor of
782 If the linkrev of the file revision does not point to an ancestor of
787 srcrev, we'll walk down the ancestors until we find one introducing
783 srcrev, we'll walk down the ancestors until we find one introducing
788 this file revision.
784 this file revision.
789
785
790 :srcrev: the changeset revision we search ancestors from
786 :srcrev: the changeset revision we search ancestors from
791 :inclusive: if true, the src revision will also be checked
787 :inclusive: if true, the src revision will also be checked
792 """
788 """
793 repo = self._repo
789 repo = self._repo
794 cl = repo.unfiltered().changelog
790 cl = repo.unfiltered().changelog
795 mfl = repo.manifestlog
791 mfl = repo.manifestlog
796 # fetch the linkrev
792 # fetch the linkrev
797 lkr = self.linkrev()
793 lkr = self.linkrev()
798 # hack to reuse ancestor computation when searching for renames
794 # hack to reuse ancestor computation when searching for renames
799 memberanc = getattr(self, '_ancestrycontext', None)
795 memberanc = getattr(self, '_ancestrycontext', None)
800 iteranc = None
796 iteranc = None
801 if srcrev is None:
797 if srcrev is None:
802 # wctx case, used by workingfilectx during mergecopy
798 # wctx case, used by workingfilectx during mergecopy
803 revs = [p.rev() for p in self._repo[None].parents()]
799 revs = [p.rev() for p in self._repo[None].parents()]
804 inclusive = True # we skipped the real (revless) source
800 inclusive = True # we skipped the real (revless) source
805 else:
801 else:
806 revs = [srcrev]
802 revs = [srcrev]
807 if memberanc is None:
803 if memberanc is None:
808 memberanc = iteranc = cl.ancestors(revs, lkr,
804 memberanc = iteranc = cl.ancestors(revs, lkr,
809 inclusive=inclusive)
805 inclusive=inclusive)
810 # check if this linkrev is an ancestor of srcrev
806 # check if this linkrev is an ancestor of srcrev
811 if lkr not in memberanc:
807 if lkr not in memberanc:
812 if iteranc is None:
808 if iteranc is None:
813 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
809 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
814 fnode = self._filenode
810 fnode = self._filenode
815 path = self._path
811 path = self._path
816 for a in iteranc:
812 for a in iteranc:
817 ac = cl.read(a) # get changeset data (we avoid object creation)
813 ac = cl.read(a) # get changeset data (we avoid object creation)
818 if path in ac[3]: # checking the 'files' field.
814 if path in ac[3]: # checking the 'files' field.
819 # The file has been touched, check if the content is
815 # The file has been touched, check if the content is
820 # similar to the one we search for.
816 # similar to the one we search for.
821 if fnode == mfl[ac[0]].readfast().get(path):
817 if fnode == mfl[ac[0]].readfast().get(path):
822 return a
818 return a
823 # In theory, we should never get out of that loop without a result.
819 # In theory, we should never get out of that loop without a result.
824 # But if manifest uses a buggy file revision (not children of the
820 # But if manifest uses a buggy file revision (not children of the
825 # one it replaces) we could. Such a buggy situation will likely
821 # one it replaces) we could. Such a buggy situation will likely
826 # result is crash somewhere else at to some point.
822 # result is crash somewhere else at to some point.
827 return lkr
823 return lkr
828
824
829 def introrev(self):
825 def introrev(self):
830 """return the rev of the changeset which introduced this file revision
826 """return the rev of the changeset which introduced this file revision
831
827
832 This method is different from linkrev because it take into account the
828 This method is different from linkrev because it take into account the
833 changeset the filectx was created from. It ensures the returned
829 changeset the filectx was created from. It ensures the returned
834 revision is one of its ancestors. This prevents bugs from
830 revision is one of its ancestors. This prevents bugs from
835 'linkrev-shadowing' when a file revision is used by multiple
831 'linkrev-shadowing' when a file revision is used by multiple
836 changesets.
832 changesets.
837 """
833 """
838 lkr = self.linkrev()
834 lkr = self.linkrev()
839 attrs = vars(self)
835 attrs = vars(self)
840 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
836 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
841 if noctx or self.rev() == lkr:
837 if noctx or self.rev() == lkr:
842 return self.linkrev()
838 return self.linkrev()
843 return self._adjustlinkrev(self.rev(), inclusive=True)
839 return self._adjustlinkrev(self.rev(), inclusive=True)
844
840
845 def introfilectx(self):
841 def introfilectx(self):
846 """Return filectx having identical contents, but pointing to the
842 """Return filectx having identical contents, but pointing to the
847 changeset revision where this filectx was introduced"""
843 changeset revision where this filectx was introduced"""
848 introrev = self.introrev()
844 introrev = self.introrev()
849 if self.rev() == introrev:
845 if self.rev() == introrev:
850 return self
846 return self
851 return self.filectx(self.filenode(), changeid=introrev)
847 return self.filectx(self.filenode(), changeid=introrev)
852
848
853 def _parentfilectx(self, path, fileid, filelog):
849 def _parentfilectx(self, path, fileid, filelog):
854 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
850 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
855 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
851 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
856 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
852 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
857 # If self is associated with a changeset (probably explicitly
853 # If self is associated with a changeset (probably explicitly
858 # fed), ensure the created filectx is associated with a
854 # fed), ensure the created filectx is associated with a
859 # changeset that is an ancestor of self.changectx.
855 # changeset that is an ancestor of self.changectx.
860 # This lets us later use _adjustlinkrev to get a correct link.
856 # This lets us later use _adjustlinkrev to get a correct link.
861 fctx._descendantrev = self.rev()
857 fctx._descendantrev = self.rev()
862 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
858 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
863 elif r'_descendantrev' in vars(self):
859 elif r'_descendantrev' in vars(self):
864 # Otherwise propagate _descendantrev if we have one associated.
860 # Otherwise propagate _descendantrev if we have one associated.
865 fctx._descendantrev = self._descendantrev
861 fctx._descendantrev = self._descendantrev
866 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
862 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
867 return fctx
863 return fctx
868
864
869 def parents(self):
865 def parents(self):
870 _path = self._path
866 _path = self._path
871 fl = self._filelog
867 fl = self._filelog
872 parents = self._filelog.parents(self._filenode)
868 parents = self._filelog.parents(self._filenode)
873 pl = [(_path, node, fl) for node in parents if node != nullid]
869 pl = [(_path, node, fl) for node in parents if node != nullid]
874
870
875 r = fl.renamed(self._filenode)
871 r = fl.renamed(self._filenode)
876 if r:
872 if r:
877 # - In the simple rename case, both parent are nullid, pl is empty.
873 # - In the simple rename case, both parent are nullid, pl is empty.
878 # - In case of merge, only one of the parent is null id and should
874 # - In case of merge, only one of the parent is null id and should
879 # be replaced with the rename information. This parent is -always-
875 # be replaced with the rename information. This parent is -always-
880 # the first one.
876 # the first one.
881 #
877 #
882 # As null id have always been filtered out in the previous list
878 # As null id have always been filtered out in the previous list
883 # comprehension, inserting to 0 will always result in "replacing
879 # comprehension, inserting to 0 will always result in "replacing
884 # first nullid parent with rename information.
880 # first nullid parent with rename information.
885 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
881 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
886
882
887 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
883 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
888
884
889 def p1(self):
885 def p1(self):
890 return self.parents()[0]
886 return self.parents()[0]
891
887
892 def p2(self):
888 def p2(self):
893 p = self.parents()
889 p = self.parents()
894 if len(p) == 2:
890 if len(p) == 2:
895 return p[1]
891 return p[1]
896 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
892 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
897
893
898 def annotate(self, follow=False, skiprevs=None, diffopts=None):
894 def annotate(self, follow=False, skiprevs=None, diffopts=None):
899 """Returns a list of annotateline objects for each line in the file
895 """Returns a list of annotateline objects for each line in the file
900
896
901 - line.fctx is the filectx of the node where that line was last changed
897 - line.fctx is the filectx of the node where that line was last changed
902 - line.lineno is the line number at the first appearance in the managed
898 - line.lineno is the line number at the first appearance in the managed
903 file
899 file
904 - line.text is the data on that line (including newline character)
900 - line.text is the data on that line (including newline character)
905 """
901 """
906 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
902 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
907
903
908 def parents(f):
904 def parents(f):
909 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
905 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
910 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
906 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
911 # from the topmost introrev (= srcrev) down to p.linkrev() if it
907 # from the topmost introrev (= srcrev) down to p.linkrev() if it
912 # isn't an ancestor of the srcrev.
908 # isn't an ancestor of the srcrev.
913 f._changeid
909 f._changeid
914 pl = f.parents()
910 pl = f.parents()
915
911
916 # Don't return renamed parents if we aren't following.
912 # Don't return renamed parents if we aren't following.
917 if not follow:
913 if not follow:
918 pl = [p for p in pl if p.path() == f.path()]
914 pl = [p for p in pl if p.path() == f.path()]
919
915
920 # renamed filectx won't have a filelog yet, so set it
916 # renamed filectx won't have a filelog yet, so set it
921 # from the cache to save time
917 # from the cache to save time
922 for p in pl:
918 for p in pl:
923 if not r'_filelog' in p.__dict__:
919 if not r'_filelog' in p.__dict__:
924 p._filelog = getlog(p.path())
920 p._filelog = getlog(p.path())
925
921
926 return pl
922 return pl
927
923
928 # use linkrev to find the first changeset where self appeared
924 # use linkrev to find the first changeset where self appeared
929 base = self.introfilectx()
925 base = self.introfilectx()
930 if getattr(base, '_ancestrycontext', None) is None:
926 if getattr(base, '_ancestrycontext', None) is None:
931 cl = self._repo.changelog
927 cl = self._repo.changelog
932 if base.rev() is None:
928 if base.rev() is None:
933 # wctx is not inclusive, but works because _ancestrycontext
929 # wctx is not inclusive, but works because _ancestrycontext
934 # is used to test filelog revisions
930 # is used to test filelog revisions
935 ac = cl.ancestors([p.rev() for p in base.parents()],
931 ac = cl.ancestors([p.rev() for p in base.parents()],
936 inclusive=True)
932 inclusive=True)
937 else:
933 else:
938 ac = cl.ancestors([base.rev()], inclusive=True)
934 ac = cl.ancestors([base.rev()], inclusive=True)
939 base._ancestrycontext = ac
935 base._ancestrycontext = ac
940
936
941 return dagop.annotate(base, parents, skiprevs=skiprevs,
937 return dagop.annotate(base, parents, skiprevs=skiprevs,
942 diffopts=diffopts)
938 diffopts=diffopts)
943
939
944 def ancestors(self, followfirst=False):
940 def ancestors(self, followfirst=False):
945 visit = {}
941 visit = {}
946 c = self
942 c = self
947 if followfirst:
943 if followfirst:
948 cut = 1
944 cut = 1
949 else:
945 else:
950 cut = None
946 cut = None
951
947
952 while True:
948 while True:
953 for parent in c.parents()[:cut]:
949 for parent in c.parents()[:cut]:
954 visit[(parent.linkrev(), parent.filenode())] = parent
950 visit[(parent.linkrev(), parent.filenode())] = parent
955 if not visit:
951 if not visit:
956 break
952 break
957 c = visit.pop(max(visit))
953 c = visit.pop(max(visit))
958 yield c
954 yield c
959
955
960 def decodeddata(self):
956 def decodeddata(self):
961 """Returns `data()` after running repository decoding filters.
957 """Returns `data()` after running repository decoding filters.
962
958
963 This is often equivalent to how the data would be expressed on disk.
959 This is often equivalent to how the data would be expressed on disk.
964 """
960 """
965 return self._repo.wwritedata(self.path(), self.data())
961 return self._repo.wwritedata(self.path(), self.data())
966
962
967 class filectx(basefilectx):
963 class filectx(basefilectx):
968 """A filecontext object makes access to data related to a particular
964 """A filecontext object makes access to data related to a particular
969 filerevision convenient."""
965 filerevision convenient."""
970 def __init__(self, repo, path, changeid=None, fileid=None,
966 def __init__(self, repo, path, changeid=None, fileid=None,
971 filelog=None, changectx=None):
967 filelog=None, changectx=None):
972 """changeid can be a changeset revision, node, or tag.
968 """changeid can be a changeset revision, node, or tag.
973 fileid can be a file revision or node."""
969 fileid can be a file revision or node."""
974 self._repo = repo
970 self._repo = repo
975 self._path = path
971 self._path = path
976
972
977 assert (changeid is not None
973 assert (changeid is not None
978 or fileid is not None
974 or fileid is not None
979 or changectx is not None), \
975 or changectx is not None), \
980 ("bad args: changeid=%r, fileid=%r, changectx=%r"
976 ("bad args: changeid=%r, fileid=%r, changectx=%r"
981 % (changeid, fileid, changectx))
977 % (changeid, fileid, changectx))
982
978
983 if filelog is not None:
979 if filelog is not None:
984 self._filelog = filelog
980 self._filelog = filelog
985
981
986 if changeid is not None:
982 if changeid is not None:
987 self._changeid = changeid
983 self._changeid = changeid
988 if changectx is not None:
984 if changectx is not None:
989 self._changectx = changectx
985 self._changectx = changectx
990 if fileid is not None:
986 if fileid is not None:
991 self._fileid = fileid
987 self._fileid = fileid
992
988
993 @propertycache
989 @propertycache
994 def _changectx(self):
990 def _changectx(self):
995 try:
991 try:
996 return changectx(self._repo, self._changeid)
992 return changectx(self._repo, self._changeid)
997 except error.FilteredRepoLookupError:
993 except error.FilteredRepoLookupError:
998 # Linkrev may point to any revision in the repository. When the
994 # Linkrev may point to any revision in the repository. When the
999 # repository is filtered this may lead to `filectx` trying to build
995 # repository is filtered this may lead to `filectx` trying to build
1000 # `changectx` for filtered revision. In such case we fallback to
996 # `changectx` for filtered revision. In such case we fallback to
1001 # creating `changectx` on the unfiltered version of the reposition.
997 # creating `changectx` on the unfiltered version of the reposition.
1002 # This fallback should not be an issue because `changectx` from
998 # This fallback should not be an issue because `changectx` from
1003 # `filectx` are not used in complex operations that care about
999 # `filectx` are not used in complex operations that care about
1004 # filtering.
1000 # filtering.
1005 #
1001 #
1006 # This fallback is a cheap and dirty fix that prevent several
1002 # This fallback is a cheap and dirty fix that prevent several
1007 # crashes. It does not ensure the behavior is correct. However the
1003 # crashes. It does not ensure the behavior is correct. However the
1008 # behavior was not correct before filtering either and "incorrect
1004 # behavior was not correct before filtering either and "incorrect
1009 # behavior" is seen as better as "crash"
1005 # behavior" is seen as better as "crash"
1010 #
1006 #
1011 # Linkrevs have several serious troubles with filtering that are
1007 # Linkrevs have several serious troubles with filtering that are
1012 # complicated to solve. Proper handling of the issue here should be
1008 # complicated to solve. Proper handling of the issue here should be
1013 # considered when solving linkrev issue are on the table.
1009 # considered when solving linkrev issue are on the table.
1014 return changectx(self._repo.unfiltered(), self._changeid)
1010 return changectx(self._repo.unfiltered(), self._changeid)
1015
1011
1016 def filectx(self, fileid, changeid=None):
1012 def filectx(self, fileid, changeid=None):
1017 '''opens an arbitrary revision of the file without
1013 '''opens an arbitrary revision of the file without
1018 opening a new filelog'''
1014 opening a new filelog'''
1019 return filectx(self._repo, self._path, fileid=fileid,
1015 return filectx(self._repo, self._path, fileid=fileid,
1020 filelog=self._filelog, changeid=changeid)
1016 filelog=self._filelog, changeid=changeid)
1021
1017
1022 def rawdata(self):
1018 def rawdata(self):
1023 return self._filelog.revision(self._filenode, raw=True)
1019 return self._filelog.revision(self._filenode, raw=True)
1024
1020
1025 def rawflags(self):
1021 def rawflags(self):
1026 """low-level revlog flags"""
1022 """low-level revlog flags"""
1027 return self._filelog.flags(self._filerev)
1023 return self._filelog.flags(self._filerev)
1028
1024
1029 def data(self):
1025 def data(self):
1030 try:
1026 try:
1031 return self._filelog.read(self._filenode)
1027 return self._filelog.read(self._filenode)
1032 except error.CensoredNodeError:
1028 except error.CensoredNodeError:
1033 if self._repo.ui.config("censor", "policy") == "ignore":
1029 if self._repo.ui.config("censor", "policy") == "ignore":
1034 return ""
1030 return ""
1035 raise error.Abort(_("censored node: %s") % short(self._filenode),
1031 raise error.Abort(_("censored node: %s") % short(self._filenode),
1036 hint=_("set censor.policy to ignore errors"))
1032 hint=_("set censor.policy to ignore errors"))
1037
1033
1038 def size(self):
1034 def size(self):
1039 return self._filelog.size(self._filerev)
1035 return self._filelog.size(self._filerev)
1040
1036
1041 @propertycache
1037 @propertycache
1042 def _copied(self):
1038 def _copied(self):
1043 """check if file was actually renamed in this changeset revision
1039 """check if file was actually renamed in this changeset revision
1044
1040
1045 If rename logged in file revision, we report copy for changeset only
1041 If rename logged in file revision, we report copy for changeset only
1046 if file revisions linkrev points back to the changeset in question
1042 if file revisions linkrev points back to the changeset in question
1047 or both changeset parents contain different file revisions.
1043 or both changeset parents contain different file revisions.
1048 """
1044 """
1049
1045
1050 renamed = self._filelog.renamed(self._filenode)
1046 renamed = self._filelog.renamed(self._filenode)
1051 if not renamed:
1047 if not renamed:
1052 return renamed
1048 return renamed
1053
1049
1054 if self.rev() == self.linkrev():
1050 if self.rev() == self.linkrev():
1055 return renamed
1051 return renamed
1056
1052
1057 name = self.path()
1053 name = self.path()
1058 fnode = self._filenode
1054 fnode = self._filenode
1059 for p in self._changectx.parents():
1055 for p in self._changectx.parents():
1060 try:
1056 try:
1061 if fnode == p.filenode(name):
1057 if fnode == p.filenode(name):
1062 return None
1058 return None
1063 except error.LookupError:
1059 except error.LookupError:
1064 pass
1060 pass
1065 return renamed
1061 return renamed
1066
1062
1067 def children(self):
1063 def children(self):
1068 # hard for renames
1064 # hard for renames
1069 c = self._filelog.children(self._filenode)
1065 c = self._filelog.children(self._filenode)
1070 return [filectx(self._repo, self._path, fileid=x,
1066 return [filectx(self._repo, self._path, fileid=x,
1071 filelog=self._filelog) for x in c]
1067 filelog=self._filelog) for x in c]
1072
1068
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""

    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(committablectx, self).__init__(repo)
        self._rev = None
        self._node = None
        self._text = text
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = dateutil.makedate()
        return date

    def subrev(self, subpath):
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # a committable context carries the bookmarks of all its parents
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        ds = self._repo.dirstate
        return sorted(f for f in ds.matches(match) if ds[f] != 'r')

    def ancestors(self):
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """
        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1274
1270
1275 class workingctx(committablectx):
1271 class workingctx(committablectx):
1276 """A workingctx object makes access to data related to
1272 """A workingctx object makes access to data related to
1277 the current working directory convenient.
1273 the current working directory convenient.
1278 date - any valid date string or (unixtime, offset), or None.
1274 date - any valid date string or (unixtime, offset), or None.
1279 user - username string, or None.
1275 user - username string, or None.
1280 extra - a dictionary of extra values, or None.
1276 extra - a dictionary of extra values, or None.
1281 changes - a list of file lists as returned by localrepo.status()
1277 changes - a list of file lists as returned by localrepo.status()
1282 or None to use the repository status.
1278 or None to use the repository status.
1283 """
1279 """
1284 def __init__(self, repo, text="", user=None, date=None, extra=None,
1280 def __init__(self, repo, text="", user=None, date=None, extra=None,
1285 changes=None):
1281 changes=None):
1286 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1282 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1287
1283
1288 def __iter__(self):
1284 def __iter__(self):
1289 d = self._repo.dirstate
1285 d = self._repo.dirstate
1290 for f in d:
1286 for f in d:
1291 if d[f] != 'r':
1287 if d[f] != 'r':
1292 yield f
1288 yield f
1293
1289
1294 def __contains__(self, key):
1290 def __contains__(self, key):
1295 return self._repo.dirstate[key] not in "?r"
1291 return self._repo.dirstate[key] not in "?r"
1296
1292
1297 def hex(self):
1293 def hex(self):
1298 return hex(wdirid)
1294 return hex(wdirid)
1299
1295
1300 @propertycache
1296 @propertycache
1301 def _parents(self):
1297 def _parents(self):
1302 p = self._repo.dirstate.parents()
1298 p = self._repo.dirstate.parents()
1303 if p[1] == nullid:
1299 if p[1] == nullid:
1304 p = p[:-1]
1300 p = p[:-1]
1305 return [changectx(self._repo, x) for x in p]
1301 return [changectx(self._repo, x) for x in p]
1306
1302
1307 def _fileinfo(self, path):
1303 def _fileinfo(self, path):
1308 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1304 # populate __dict__['_manifest'] as workingctx has no _manifestdelta
1309 self._manifest
1305 self._manifest
1310 return super(workingctx, self)._fileinfo(path)
1306 return super(workingctx, self)._fileinfo(path)
1311
1307
1312 def filectx(self, path, filelog=None):
1308 def filectx(self, path, filelog=None):
1313 """get a file context from the working directory"""
1309 """get a file context from the working directory"""
1314 return workingfilectx(self._repo, path, workingctx=self,
1310 return workingfilectx(self._repo, path, workingctx=self,
1315 filelog=filelog)
1311 filelog=filelog)
1316
1312
1317 def dirty(self, missing=False, merge=True, branch=True):
1313 def dirty(self, missing=False, merge=True, branch=True):
1318 "check whether a working directory is modified"
1314 "check whether a working directory is modified"
1319 # check subrepos first
1315 # check subrepos first
1320 for s in sorted(self.substate):
1316 for s in sorted(self.substate):
1321 if self.sub(s).dirty(missing=missing):
1317 if self.sub(s).dirty(missing=missing):
1322 return True
1318 return True
1323 # check current working dir
1319 # check current working dir
1324 return ((merge and self.p2()) or
1320 return ((merge and self.p2()) or
1325 (branch and self.branch() != self.p1().branch()) or
1321 (branch and self.branch() != self.p1().branch()) or
1326 self.modified() or self.added() or self.removed() or
1322 self.modified() or self.added() or self.removed() or
1327 (missing and self.deleted()))
1323 (missing and self.deleted()))
1328
1324
1329 def add(self, list, prefix=""):
1325 def add(self, list, prefix=""):
1330 with self._repo.wlock():
1326 with self._repo.wlock():
1331 ui, ds = self._repo.ui, self._repo.dirstate
1327 ui, ds = self._repo.ui, self._repo.dirstate
1332 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1328 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1333 rejected = []
1329 rejected = []
1334 lstat = self._repo.wvfs.lstat
1330 lstat = self._repo.wvfs.lstat
1335 for f in list:
1331 for f in list:
1336 # ds.pathto() returns an absolute file when this is invoked from
1332 # ds.pathto() returns an absolute file when this is invoked from
1337 # the keyword extension. That gets flagged as non-portable on
1333 # the keyword extension. That gets flagged as non-portable on
1338 # Windows, since it contains the drive letter and colon.
1334 # Windows, since it contains the drive letter and colon.
1339 scmutil.checkportable(ui, os.path.join(prefix, f))
1335 scmutil.checkportable(ui, os.path.join(prefix, f))
1340 try:
1336 try:
1341 st = lstat(f)
1337 st = lstat(f)
1342 except OSError:
1338 except OSError:
1343 ui.warn(_("%s does not exist!\n") % uipath(f))
1339 ui.warn(_("%s does not exist!\n") % uipath(f))
1344 rejected.append(f)
1340 rejected.append(f)
1345 continue
1341 continue
1346 if st.st_size > 10000000:
1342 if st.st_size > 10000000:
1347 ui.warn(_("%s: up to %d MB of RAM may be required "
1343 ui.warn(_("%s: up to %d MB of RAM may be required "
1348 "to manage this file\n"
1344 "to manage this file\n"
1349 "(use 'hg revert %s' to cancel the "
1345 "(use 'hg revert %s' to cancel the "
1350 "pending addition)\n")
1346 "pending addition)\n")
1351 % (f, 3 * st.st_size // 1000000, uipath(f)))
1347 % (f, 3 * st.st_size // 1000000, uipath(f)))
1352 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1348 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1353 ui.warn(_("%s not added: only files and symlinks "
1349 ui.warn(_("%s not added: only files and symlinks "
1354 "supported currently\n") % uipath(f))
1350 "supported currently\n") % uipath(f))
1355 rejected.append(f)
1351 rejected.append(f)
1356 elif ds[f] in 'amn':
1352 elif ds[f] in 'amn':
1357 ui.warn(_("%s already tracked!\n") % uipath(f))
1353 ui.warn(_("%s already tracked!\n") % uipath(f))
1358 elif ds[f] == 'r':
1354 elif ds[f] == 'r':
1359 ds.normallookup(f)
1355 ds.normallookup(f)
1360 else:
1356 else:
1361 ds.add(f)
1357 ds.add(f)
1362 return rejected
1358 return rejected
1363
1359
1364 def forget(self, files, prefix=""):
1360 def forget(self, files, prefix=""):
1365 with self._repo.wlock():
1361 with self._repo.wlock():
1366 ds = self._repo.dirstate
1362 ds = self._repo.dirstate
1367 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1363 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1368 rejected = []
1364 rejected = []
1369 for f in files:
1365 for f in files:
1370 if f not in self._repo.dirstate:
1366 if f not in self._repo.dirstate:
1371 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1367 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1372 rejected.append(f)
1368 rejected.append(f)
1373 elif self._repo.dirstate[f] != 'a':
1369 elif self._repo.dirstate[f] != 'a':
1374 self._repo.dirstate.remove(f)
1370 self._repo.dirstate.remove(f)
1375 else:
1371 else:
1376 self._repo.dirstate.drop(f)
1372 self._repo.dirstate.drop(f)
1377 return rejected
1373 return rejected
1378
1374
1379 def undelete(self, list):
1375 def undelete(self, list):
1380 pctxs = self.parents()
1376 pctxs = self.parents()
1381 with self._repo.wlock():
1377 with self._repo.wlock():
1382 ds = self._repo.dirstate
1378 ds = self._repo.dirstate
1383 for f in list:
1379 for f in list:
1384 if self._repo.dirstate[f] != 'r':
1380 if self._repo.dirstate[f] != 'r':
1385 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1381 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1386 else:
1382 else:
1387 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1383 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1388 t = fctx.data()
1384 t = fctx.data()
1389 self._repo.wwrite(f, t, fctx.flags())
1385 self._repo.wwrite(f, t, fctx.flags())
1390 self._repo.dirstate.normal(f)
1386 self._repo.dirstate.normal(f)
1391
1387
1392 def copy(self, source, dest):
1388 def copy(self, source, dest):
1393 try:
1389 try:
1394 st = self._repo.wvfs.lstat(dest)
1390 st = self._repo.wvfs.lstat(dest)
1395 except OSError as err:
1391 except OSError as err:
1396 if err.errno != errno.ENOENT:
1392 if err.errno != errno.ENOENT:
1397 raise
1393 raise
1398 self._repo.ui.warn(_("%s does not exist!\n")
1394 self._repo.ui.warn(_("%s does not exist!\n")
1399 % self._repo.dirstate.pathto(dest))
1395 % self._repo.dirstate.pathto(dest))
1400 return
1396 return
1401 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1397 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1402 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1398 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1403 "symbolic link\n")
1399 "symbolic link\n")
1404 % self._repo.dirstate.pathto(dest))
1400 % self._repo.dirstate.pathto(dest))
1405 else:
1401 else:
1406 with self._repo.wlock():
1402 with self._repo.wlock():
1407 if self._repo.dirstate[dest] in '?':
1403 if self._repo.dirstate[dest] in '?':
1408 self._repo.dirstate.add(dest)
1404 self._repo.dirstate.add(dest)
1409 elif self._repo.dirstate[dest] in 'r':
1405 elif self._repo.dirstate[dest] in 'r':
1410 self._repo.dirstate.normallookup(dest)
1406 self._repo.dirstate.normallookup(dest)
1411 self._repo.dirstate.copy(source, dest)
1407 self._repo.dirstate.copy(source, dest)
1412
1408
1413 def match(self, pats=None, include=None, exclude=None, default='glob',
1409 def match(self, pats=None, include=None, exclude=None, default='glob',
1414 listsubrepos=False, badfn=None):
1410 listsubrepos=False, badfn=None):
1415 r = self._repo
1411 r = self._repo
1416
1412
1417 # Only a case insensitive filesystem needs magic to translate user input
1413 # Only a case insensitive filesystem needs magic to translate user input
1418 # to actual case in the filesystem.
1414 # to actual case in the filesystem.
1419 icasefs = not util.fscasesensitive(r.root)
1415 icasefs = not util.fscasesensitive(r.root)
1420 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1416 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1421 default, auditor=r.auditor, ctx=self,
1417 default, auditor=r.auditor, ctx=self,
1422 listsubrepos=listsubrepos, badfn=badfn,
1418 listsubrepos=listsubrepos, badfn=badfn,
1423 icasefs=icasefs)
1419 icasefs=icasefs)
1424
1420
1425 def _filtersuspectsymlink(self, files):
1421 def _filtersuspectsymlink(self, files):
1426 if not files or self._repo.dirstate._checklink:
1422 if not files or self._repo.dirstate._checklink:
1427 return files
1423 return files
1428
1424
1429 # Symlink placeholders may get non-symlink-like contents
1425 # Symlink placeholders may get non-symlink-like contents
1430 # via user error or dereferencing by NFS or Samba servers,
1426 # via user error or dereferencing by NFS or Samba servers,
1431 # so we filter out any placeholders that don't look like a
1427 # so we filter out any placeholders that don't look like a
1432 # symlink
1428 # symlink
1433 sane = []
1429 sane = []
1434 for f in files:
1430 for f in files:
1435 if self.flags(f) == 'l':
1431 if self.flags(f) == 'l':
1436 d = self[f].data()
1432 d = self[f].data()
1437 if (d == '' or len(d) >= 1024 or '\n' in d
1433 if (d == '' or len(d) >= 1024 or '\n' in d
1438 or stringutil.binary(d)):
1434 or stringutil.binary(d)):
1439 self._repo.ui.debug('ignoring suspect symlink placeholder'
1435 self._repo.ui.debug('ignoring suspect symlink placeholder'
1440 ' "%s"\n' % f)
1436 ' "%s"\n' % f)
1441 continue
1437 continue
1442 sane.append(f)
1438 sane.append(f)
1443 return sane
1439 return sane
1444
1440
1445 def _checklookup(self, files):
1441 def _checklookup(self, files):
1446 # check for any possibly clean files
1442 # check for any possibly clean files
1447 if not files:
1443 if not files:
1448 return [], [], []
1444 return [], [], []
1449
1445
1450 modified = []
1446 modified = []
1451 deleted = []
1447 deleted = []
1452 fixup = []
1448 fixup = []
1453 pctx = self._parents[0]
1449 pctx = self._parents[0]
1454 # do a full compare of any files that might have changed
1450 # do a full compare of any files that might have changed
1455 for f in sorted(files):
1451 for f in sorted(files):
1456 try:
1452 try:
1457 # This will return True for a file that got replaced by a
1453 # This will return True for a file that got replaced by a
1458 # directory in the interim, but fixing that is pretty hard.
1454 # directory in the interim, but fixing that is pretty hard.
1459 if (f not in pctx or self.flags(f) != pctx.flags(f)
1455 if (f not in pctx or self.flags(f) != pctx.flags(f)
1460 or pctx[f].cmp(self[f])):
1456 or pctx[f].cmp(self[f])):
1461 modified.append(f)
1457 modified.append(f)
1462 else:
1458 else:
1463 fixup.append(f)
1459 fixup.append(f)
1464 except (IOError, OSError):
1460 except (IOError, OSError):
1465 # A file become inaccessible in between? Mark it as deleted,
1461 # A file become inaccessible in between? Mark it as deleted,
1466 # matching dirstate behavior (issue5584).
1462 # matching dirstate behavior (issue5584).
1467 # The dirstate has more complex behavior around whether a
1463 # The dirstate has more complex behavior around whether a
1468 # missing file matches a directory, etc, but we don't need to
1464 # missing file matches a directory, etc, but we don't need to
1469 # bother with that: if f has made it to this point, we're sure
1465 # bother with that: if f has made it to this point, we're sure
1470 # it's in the dirstate.
1466 # it's in the dirstate.
1471 deleted.append(f)
1467 deleted.append(f)
1472
1468
1473 return modified, deleted, fixup
1469 return modified, deleted, fixup
1474
1470
1475 def _poststatusfixup(self, status, fixup):
1471 def _poststatusfixup(self, status, fixup):
1476 """update dirstate for files that are actually clean"""
1472 """update dirstate for files that are actually clean"""
1477 poststatus = self._repo.postdsstatus()
1473 poststatus = self._repo.postdsstatus()
1478 if fixup or poststatus:
1474 if fixup or poststatus:
1479 try:
1475 try:
1480 oldid = self._repo.dirstate.identity()
1476 oldid = self._repo.dirstate.identity()
1481
1477
1482 # updating the dirstate is optional
1478 # updating the dirstate is optional
1483 # so we don't wait on the lock
1479 # so we don't wait on the lock
1484 # wlock can invalidate the dirstate, so cache normal _after_
1480 # wlock can invalidate the dirstate, so cache normal _after_
1485 # taking the lock
1481 # taking the lock
1486 with self._repo.wlock(False):
1482 with self._repo.wlock(False):
1487 if self._repo.dirstate.identity() == oldid:
1483 if self._repo.dirstate.identity() == oldid:
1488 if fixup:
1484 if fixup:
1489 normal = self._repo.dirstate.normal
1485 normal = self._repo.dirstate.normal
1490 for f in fixup:
1486 for f in fixup:
1491 normal(f)
1487 normal(f)
1492 # write changes out explicitly, because nesting
1488 # write changes out explicitly, because nesting
1493 # wlock at runtime may prevent 'wlock.release()'
1489 # wlock at runtime may prevent 'wlock.release()'
1494 # after this block from doing so for subsequent
1490 # after this block from doing so for subsequent
1495 # changing files
1491 # changing files
1496 tr = self._repo.currenttransaction()
1492 tr = self._repo.currenttransaction()
1497 self._repo.dirstate.write(tr)
1493 self._repo.dirstate.write(tr)
1498
1494
1499 if poststatus:
1495 if poststatus:
1500 for ps in poststatus:
1496 for ps in poststatus:
1501 ps(self, status)
1497 ps(self, status)
1502 else:
1498 else:
1503 # in this case, writing changes out breaks
1499 # in this case, writing changes out breaks
1504 # consistency, because .hg/dirstate was
1500 # consistency, because .hg/dirstate was
1505 # already changed simultaneously after last
1501 # already changed simultaneously after last
1506 # caching (see also issue5584 for detail)
1502 # caching (see also issue5584 for detail)
1507 self._repo.ui.debug('skip updating dirstate: '
1503 self._repo.ui.debug('skip updating dirstate: '
1508 'identity mismatch\n')
1504 'identity mismatch\n')
1509 except error.LockError:
1505 except error.LockError:
1510 pass
1506 pass
1511 finally:
1507 finally:
1512 # Even if the wlock couldn't be grabbed, clear out the list.
1508 # Even if the wlock couldn't be grabbed, clear out the list.
1513 self._repo.clearpostdsstatus()
1509 self._repo.clearpostdsstatus()
1514
1510
    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        # collect subrepo paths so dirstate.status() can exclude them
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        # 'cmp' lists files whose dirstate entry is ambiguous and whose
        # contents must be compared to decide modified vs. clean
        cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
                                            clean=clean, unknown=unknown)

        # check for any possibly clean files
        fixup = []
        if cmp:
            modified2, deleted2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)
            s.deleted.extend(deleted2)

        # 'fixup' holds files that turned out to be clean; only surface
        # them when the caller asked for clean files
        if fixup and clean:
            s.clean.extend(fixup)

        # give registered post-status hooks (and the dirstate writer) a
        # chance to persist the fixups -- see _poststatusfixup above
        self._poststatusfixup(s, fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s
1545
1541
    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuses the file nodeids from the parent, but uses special node
        identifiers for added and modified files. This is used by manifests
        merge to see that files are different and by update logic to avoid
        deleting newly added files.
        """
        return self._buildstatusmanifest(self._status)
1556
1552
1557 def _buildstatusmanifest(self, status):
1553 def _buildstatusmanifest(self, status):
1558 """Builds a manifest that includes the given status results."""
1554 """Builds a manifest that includes the given status results."""
1559 parents = self.parents()
1555 parents = self.parents()
1560
1556
1561 man = parents[0].manifest().copy()
1557 man = parents[0].manifest().copy()
1562
1558
1563 ff = self._flagfunc
1559 ff = self._flagfunc
1564 for i, l in ((addednodeid, status.added),
1560 for i, l in ((addednodeid, status.added),
1565 (modifiednodeid, status.modified)):
1561 (modifiednodeid, status.modified)):
1566 for f in l:
1562 for f in l:
1567 man[f] = i
1563 man[f] = i
1568 try:
1564 try:
1569 man.setflag(f, ff(f))
1565 man.setflag(f, ff(f))
1570 except OSError:
1566 except OSError:
1571 pass
1567 pass
1572
1568
1573 for f in status.deleted + status.removed:
1569 for f in status.deleted + status.removed:
1574 if f in man:
1570 if f in man:
1575 del man[f]
1571 del man[f]
1576
1572
1577 return man
1573 return man
1578
1574
    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context

        This includes logic for maintaining the fast path of status when
        comparing the working directory against its parent, which is to skip
        building a new manifest if self (working directory) is not comparing
        against its parent (repo['.']).
        """
        # NOTE: the status 's' passed in is discarded; working-directory
        # status is always recomputed fresh from the dirstate here
        s = self._dirstatestatus(match, listignored, listclean, listunknown)
        # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
        # might have accidentally ended up with the entire contents of the file
        # they are supposed to be linking to.
        s.modified[:] = self._filtersuspectsymlink(s.modified)
        if other != self._repo['.']:
            # not comparing against the working directory's parent: fall
            # back to the generic manifest-comparison implementation
            s = super(workingctx, self)._buildstatus(other, s, match,
                                                     listignored, listclean,
                                                     listunknown)
        return s
1598
1594
1599 def _matchstatus(self, other, match):
1595 def _matchstatus(self, other, match):
1600 """override the match method with a filter for directory patterns
1596 """override the match method with a filter for directory patterns
1601
1597
1602 We use inheritance to customize the match.bad method only in cases of
1598 We use inheritance to customize the match.bad method only in cases of
1603 workingctx since it belongs only to the working directory when
1599 workingctx since it belongs only to the working directory when
1604 comparing against the parent changeset.
1600 comparing against the parent changeset.
1605
1601
1606 If we aren't comparing against the working directory's parent, then we
1602 If we aren't comparing against the working directory's parent, then we
1607 just use the default match object sent to us.
1603 just use the default match object sent to us.
1608 """
1604 """
1609 if other != self._repo['.']:
1605 if other != self._repo['.']:
1610 def bad(f, msg):
1606 def bad(f, msg):
1611 # 'f' may be a directory pattern from 'match.files()',
1607 # 'f' may be a directory pattern from 'match.files()',
1612 # so 'f not in ctx1' is not enough
1608 # so 'f not in ctx1' is not enough
1613 if f not in other and not other.hasdir(f):
1609 if f not in other and not other.hasdir(f):
1614 self._repo.ui.warn('%s: %s\n' %
1610 self._repo.ui.warn('%s: %s\n' %
1615 (self._repo.dirstate.pathto(f), msg))
1611 (self._repo.dirstate.pathto(f), msg))
1616 match.bad = bad
1612 match.bad = bad
1617 return match
1613 return match
1618
1614
    def markcommitted(self, node):
        # let the base class update dirstate bookkeeping for the new node
        super(workingctx, self).markcommitted(node)

        # notify the sparse machinery so it can react to the commit
        sparse.aftercommit(self._repo, node)
1623
1619
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        # only pin the filelog/changectx when the caller supplies them;
        # otherwise the basefilectx propertycaches compute them lazily
        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # an uncommitted file context always "exists"
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            # nullid signals "path absent in that parent"
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # (source path, source filenode) from renamed(); filelog is
            # resolved lazily by _parentfilectx (hence None)
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        # drop parents where the file does not exist (filenode == nullid)
        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        # uncommitted contexts have no committed descendants
        return []
1670
1666
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # the enclosing change context is always the working directory
        return workingctx(self._repo)

    def data(self):
        return self._repo.wread(self._path)
    def renamed(self):
        # returns (source path, source filenode) or None when not copied
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        t, tz = self._changectx.date()
        try:
            # prefer the on-disk mtime, keeping the changectx timezone
            return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # file is gone from disk: fall back to the changectx date
            return (t, tz)

    def exists(self):
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        # path safety check (symlink traversal etc.) via the working vfs
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        rmdir = self._repo.ui.configbool('experimental', 'removeemptydirs')
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing,
                                   rmdir=rmdir)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose,
                          **kwargs)

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        # only record the copy for tracked states (normal/merged/added)
        if self._repo.dirstate[self._path] in "nma":
            self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        if wvfs.isdir(f) and not wvfs.islink(f):
            # a real directory is squatting on our file name; remove it
            wvfs.rmtree(f, forcibly=True)
        if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
            # remove the deepest ancestor that is a file/symlink, which
            # would otherwise block creating the parent directories
            for p in reversed(list(util.finddirs(f))):
                if wvfs.isfileorlink(p):
                    wvfs.unlink(p)
                    break

    def setflags(self, l, x):
        # l: symlink flag, x: executable flag
        self._repo.wvfs.setflags(self._path, l, x)
1752
1748
class overlayworkingctx(committablectx):
    """Wraps another mutable context with a write-back cache that can be
    converted into a commit context.

    self._cache[path] maps to a dict with keys: {
      'exists': bool?
      'date': date?
      'data': str?
      'flags': str?
      'copied': str? (path or None)
    }
    If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
    is `False`, the file was deleted.
    """

    def __init__(self, repo):
        super(overlayworkingctx, self).__init__(repo)
        self.clean()

    def setbase(self, wrappedctx):
        # the wrapped context is both the data source and the sole parent
        self._wrappedctx = wrappedctx
        self._parents = [wrappedctx]
        # Drop old manifest cache as it is now out of date.
        # This is necessary when, e.g., rebasing several nodes with one
        # ``overlayworkingctx`` (e.g. with --collapse).
        util.clearcachedproperty(self, '_manifest')

    def data(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                if self._cache[path]['data']:
                    return self._cache[path]['data']
                else:
                    # Must fallback here, too, because we only set flags.
                    return self._wrappedctx[path].data()
            else:
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].data()

    @propertycache
    def _manifest(self):
        parents = self.parents()
        man = parents[0].manifest().copy()

        flag = self._flagfunc
        for path in self.added():
            man[path] = addednodeid
            man.setflag(path, flag(path))
        for path in self.modified():
            man[path] = modifiednodeid
            man.setflag(path, flag(path))
        for path in self.removed():
            del man[path]
        return man

    @propertycache
    def _flagfunc(self):
        # only valid for dirty paths; clean paths go through _wrappedctx
        def f(path):
            return self._cache[path]['flags']
        return f

    def files(self):
        return sorted(self.added() + self.modified() + self.removed())

    def modified(self):
        # dirty, still exists, and existed in the parent -> modified
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                self._existsinparent(f)]

    def added(self):
        # dirty, exists now, but absent from the parent -> added
        return [f for f in self._cache.keys() if self._cache[f]['exists'] and
                not self._existsinparent(f)]

    def removed(self):
        # dirty, gone now, but present in the parent -> removed
        return [f for f in self._cache.keys() if
                not self._cache[f]['exists'] and self._existsinparent(f)]

    def isinmemory(self):
        return True

    def filedate(self, path):
        if self.isdirty(path):
            return self._cache[path]['date']
        else:
            return self._wrappedctx[path].date()

    def markcopied(self, path, origin):
        if self.isdirty(path):
            self._cache[path]['copied'] = origin
        else:
            raise error.ProgrammingError('markcopied() called on clean context')

    def copydata(self, path):
        if self.isdirty(path):
            return self._cache[path]['copied']
        else:
            raise error.ProgrammingError('copydata() called on clean context')

    def flags(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return self._cache[path]['flags']
            else:
                # fix: report the queried path; this class has no ``_path``
                # attribute (that belongs to filectx objects), so the old
                # ``self._path`` here raised AttributeError instead
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        else:
            return self._wrappedctx[path].flags()

    def _existsinparent(self, path):
        try:
            # ``commitctx` raises a ``ManifestLookupError`` if a path does not
            # exist, unlike ``workingctx``, which returns a ``workingfilectx``
            # with an ``exists()`` function.
            self._wrappedctx[path]
            return True
        except error.ManifestLookupError:
            return False

    def _auditconflicts(self, path):
        """Replicates conflict checks done by wvfs.write().

        Since we never write to the filesystem and never call `applyupdates` in
        IMM, we'll never check that a path is actually writable -- e.g., because
        it adds `a/foo`, but `a` is actually a file in the other commit.
        """
        def fail(path, component):
            # p1() is the base and we're receiving "writes" for p2()'s
            # files.
            if 'l' in self.p1()[component].flags():
                raise error.Abort("error: %s conflicts with symlink %s "
                                  "in %s." % (path, component,
                                              self.p1().rev()))
            else:
                raise error.Abort("error: '%s' conflicts with file '%s' in "
                                  "%s." % (path, component,
                                           self.p1().rev()))

        # Test that each new directory to be created to write this path from p2
        # is not a file in p1.
        components = path.split('/')
        # pycompat.xrange for py3 compatibility, consistent with the
        # file-level pycompat import (bare xrange is py2-only)
        for i in pycompat.xrange(len(components)):
            component = "/".join(components[0:i])
            if component in self.p1():
                fail(path, component)

        # Test the other direction -- that this path from p2 isn't a directory
        # in p1 (test that p1 doesn't any paths matching `path/*`).
        match = matchmod.match('/', '', [path + '/'], default=b'relpath')
        matches = self.p1().manifest().matches(match)
        if len(matches) > 0:
            if len(matches) == 1 and matches.keys()[0] == path:
                return
            raise error.Abort("error: file '%s' cannot be written because "
                              " '%s/' is a folder in %s (containing %d "
                              "entries: %s)"
                              % (path, path, self.p1(), len(matches),
                                 ', '.join(matches.keys())))

    def write(self, path, data, flags='', **kwargs):
        if data is None:
            raise error.ProgrammingError("data must be non-None")
        self._auditconflicts(path)
        self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
                        flags=flags)

    def setflags(self, path, l, x):
        self._markdirty(path, exists=True, date=dateutil.makedate(),
                        flags=(l and 'l' or '') + (x and 'x' or ''))

    def remove(self, path):
        self._markdirty(path, exists=False)

    def exists(self, path):
        """exists behaves like `lexists`, but needs to follow symlinks and
        return False if they are broken.
        """
        if self.isdirty(path):
            # If this path exists and is a symlink, "follow" it by calling
            # exists on the destination path.
            if (self._cache[path]['exists'] and
                        'l' in self._cache[path]['flags']):
                return self.exists(self._cache[path]['data'].strip())
            else:
                return self._cache[path]['exists']

        return self._existsinparent(path)

    def lexists(self, path):
        """lexists returns True if the path exists"""
        if self.isdirty(path):
            return self._cache[path]['exists']

        return self._existsinparent(path)

    def size(self, path):
        if self.isdirty(path):
            if self._cache[path]['exists']:
                return len(self._cache[path]['data'])
            else:
                # fix: report the queried path; ``self._path`` does not
                # exist on this class and raised AttributeError here
                raise error.ProgrammingError("No such file or directory: %s" %
                                             path)
        return self._wrappedctx[path].size()

    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped contexts' if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        files = self._cache.keys()
        def getfile(repo, memctx, path):
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)

    def isdirty(self, path):
        return path in self._cache

    def isempty(self):
        # We need to discard any keys that are actually clean before the empty
        # commit check.
        self._compact()
        return len(self._cache) == 0

    def clean(self):
        self._cache = {}

    def _compact(self):
        """Removes keys from the cache that are actually clean, by comparing
        them with the underlying context.

        This can occur during the merge process, e.g. by passing --tool :local
        to resolve a conflict.
        """
        keys = []
        for path in self._cache.keys():
            cache = self._cache[path]
            try:
                underlying = self._wrappedctx[path]
                if (underlying.data() == cache['data'] and
                            underlying.flags() == cache['flags']):
                    keys.append(path)
            except error.ManifestLookupError:
                # Path not in the underlying manifest (created).
                continue

        for path in keys:
            del self._cache[path]
        return keys

    def _markdirty(self, path, exists, data=None, date=None, flags=''):
        self._cache[path] = {
            'exists': exists,
            'data': data,
            'date': date,
            'flags': flags,
            'copied': None,
        }

    def filectx(self, path, filelog=None):
        return overlayworkingfilectx(self._repo, path, parent=self,
                                     filelog=filelog)
2039
2035
2040 class overlayworkingfilectx(committablefilectx):
2036 class overlayworkingfilectx(committablefilectx):
2041 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2037 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2042 cache, which can be flushed through later by calling ``flush()``."""
2038 cache, which can be flushed through later by calling ``flush()``."""
2043
2039
2044 def __init__(self, repo, path, filelog=None, parent=None):
2040 def __init__(self, repo, path, filelog=None, parent=None):
2045 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2041 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2046 parent)
2042 parent)
2047 self._repo = repo
2043 self._repo = repo
2048 self._parent = parent
2044 self._parent = parent
2049 self._path = path
2045 self._path = path
2050
2046
2051 def cmp(self, fctx):
2047 def cmp(self, fctx):
2052 return self.data() != fctx.data()
2048 return self.data() != fctx.data()
2053
2049
2054 def changectx(self):
2050 def changectx(self):
2055 return self._parent
2051 return self._parent
2056
2052
2057 def data(self):
2053 def data(self):
2058 return self._parent.data(self._path)
2054 return self._parent.data(self._path)
2059
2055
2060 def date(self):
2056 def date(self):
2061 return self._parent.filedate(self._path)
2057 return self._parent.filedate(self._path)
2062
2058
2063 def exists(self):
2059 def exists(self):
2064 return self.lexists()
2060 return self.lexists()
2065
2061
2066 def lexists(self):
2062 def lexists(self):
2067 return self._parent.exists(self._path)
2063 return self._parent.exists(self._path)
2068
2064
2069 def renamed(self):
2065 def renamed(self):
2070 path = self._parent.copydata(self._path)
2066 path = self._parent.copydata(self._path)
2071 if not path:
2067 if not path:
2072 return None
2068 return None
2073 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2069 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2074
2070
2075 def size(self):
2071 def size(self):
2076 return self._parent.size(self._path)
2072 return self._parent.size(self._path)
2077
2073
2078 def markcopied(self, origin):
2074 def markcopied(self, origin):
2079 self._parent.markcopied(self._path, origin)
2075 self._parent.markcopied(self._path, origin)
2080
2076
2081 def audit(self):
2077 def audit(self):
2082 pass
2078 pass
2083
2079
2084 def flags(self):
2080 def flags(self):
2085 return self._parent.flags(self._path)
2081 return self._parent.flags(self._path)
2086
2082
2087 def setflags(self, islink, isexec):
2083 def setflags(self, islink, isexec):
2088 return self._parent.setflags(self._path, islink, isexec)
2084 return self._parent.setflags(self._path, islink, isexec)
2089
2085
2090 def write(self, data, flags, backgroundclose=False, **kwargs):
2086 def write(self, data, flags, backgroundclose=False, **kwargs):
2091 return self._parent.write(self._path, data, flags, **kwargs)
2087 return self._parent.write(self._path, data, flags, **kwargs)
2092
2088
2093 def remove(self, ignoremissing=False):
2089 def remove(self, ignoremissing=False):
2094 return self._parent.remove(self._path)
2090 return self._parent.remove(self._path)
2095
2091
2096 def clearunknown(self):
2092 def clearunknown(self):
2097 pass
2093 pass
2098
2094
2099 class workingcommitctx(workingctx):
2095 class workingcommitctx(workingctx):
2100 """A workingcommitctx object makes access to data related to
2096 """A workingcommitctx object makes access to data related to
2101 the revision being committed convenient.
2097 the revision being committed convenient.
2102
2098
2103 This hides changes in the working directory, if they aren't
2099 This hides changes in the working directory, if they aren't
2104 committed in this context.
2100 committed in this context.
2105 """
2101 """
2106 def __init__(self, repo, changes,
2102 def __init__(self, repo, changes,
2107 text="", user=None, date=None, extra=None):
2103 text="", user=None, date=None, extra=None):
2108 super(workingctx, self).__init__(repo, text, user, date, extra,
2104 super(workingctx, self).__init__(repo, text, user, date, extra,
2109 changes)
2105 changes)
2110
2106
2111 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2107 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2112 """Return matched files only in ``self._status``
2108 """Return matched files only in ``self._status``
2113
2109
2114 Uncommitted files appear "clean" via this context, even if
2110 Uncommitted files appear "clean" via this context, even if
2115 they aren't actually so in the working directory.
2111 they aren't actually so in the working directory.
2116 """
2112 """
2117 if clean:
2113 if clean:
2118 clean = [f for f in self._manifest if f not in self._changedset]
2114 clean = [f for f in self._manifest if f not in self._changedset]
2119 else:
2115 else:
2120 clean = []
2116 clean = []
2121 return scmutil.status([f for f in self._status.modified if match(f)],
2117 return scmutil.status([f for f in self._status.modified if match(f)],
2122 [f for f in self._status.added if match(f)],
2118 [f for f in self._status.added if match(f)],
2123 [f for f in self._status.removed if match(f)],
2119 [f for f in self._status.removed if match(f)],
2124 [], [], [], clean)
2120 [], [], [], clean)
2125
2121
2126 @propertycache
2122 @propertycache
2127 def _changedset(self):
2123 def _changedset(self):
2128 """Return the set of files changed in this context
2124 """Return the set of files changed in this context
2129 """
2125 """
2130 changed = set(self._status.modified)
2126 changed = set(self._status.modified)
2131 changed.update(self._status.added)
2127 changed.update(self._status.added)
2132 changed.update(self._status.removed)
2128 changed.update(self._status.removed)
2133 return changed
2129 return changed
2134
2130
2135 def makecachingfilectxfn(func):
2131 def makecachingfilectxfn(func):
2136 """Create a filectxfn that caches based on the path.
2132 """Create a filectxfn that caches based on the path.
2137
2133
2138 We can't use util.cachefunc because it uses all arguments as the cache
2134 We can't use util.cachefunc because it uses all arguments as the cache
2139 key and this creates a cycle since the arguments include the repo and
2135 key and this creates a cycle since the arguments include the repo and
2140 memctx.
2136 memctx.
2141 """
2137 """
2142 cache = {}
2138 cache = {}
2143
2139
2144 def getfilectx(repo, memctx, path):
2140 def getfilectx(repo, memctx, path):
2145 if path not in cache:
2141 if path not in cache:
2146 cache[path] = func(repo, memctx, path)
2142 cache[path] = func(repo, memctx, path)
2147 return cache[path]
2143 return cache[path]
2148
2144
2149 return getfilectx
2145 return getfilectx
2150
2146
2151 def memfilefromctx(ctx):
2147 def memfilefromctx(ctx):
2152 """Given a context return a memfilectx for ctx[path]
2148 """Given a context return a memfilectx for ctx[path]
2153
2149
2154 This is a convenience method for building a memctx based on another
2150 This is a convenience method for building a memctx based on another
2155 context.
2151 context.
2156 """
2152 """
2157 def getfilectx(repo, memctx, path):
2153 def getfilectx(repo, memctx, path):
2158 fctx = ctx[path]
2154 fctx = ctx[path]
2159 # this is weird but apparently we only keep track of one parent
2155 # this is weird but apparently we only keep track of one parent
2160 # (why not only store that instead of a tuple?)
2156 # (why not only store that instead of a tuple?)
2161 copied = fctx.renamed()
2157 copied = fctx.renamed()
2162 if copied:
2158 if copied:
2163 copied = copied[0]
2159 copied = copied[0]
2164 return memfilectx(repo, memctx, path, fctx.data(),
2160 return memfilectx(repo, memctx, path, fctx.data(),
2165 islink=fctx.islink(), isexec=fctx.isexec(),
2161 islink=fctx.islink(), isexec=fctx.isexec(),
2166 copied=copied)
2162 copied=copied)
2167
2163
2168 return getfilectx
2164 return getfilectx
2169
2165
2170 def memfilefrompatch(patchstore):
2166 def memfilefrompatch(patchstore):
2171 """Given a patch (e.g. patchstore object) return a memfilectx
2167 """Given a patch (e.g. patchstore object) return a memfilectx
2172
2168
2173 This is a convenience method for building a memctx based on a patchstore.
2169 This is a convenience method for building a memctx based on a patchstore.
2174 """
2170 """
2175 def getfilectx(repo, memctx, path):
2171 def getfilectx(repo, memctx, path):
2176 data, mode, copied = patchstore.getfile(path)
2172 data, mode, copied = patchstore.getfile(path)
2177 if data is None:
2173 if data is None:
2178 return None
2174 return None
2179 islink, isexec = mode
2175 islink, isexec = mode
2180 return memfilectx(repo, memctx, path, data, islink=islink,
2176 return memfilectx(repo, memctx, path, data, islink=islink,
2181 isexec=isexec, copied=copied)
2177 isexec=isexec, copied=copied)
2182
2178
2183 return getfilectx
2179 return getfilectx
2184
2180
2185 class memctx(committablectx):
2181 class memctx(committablectx):
2186 """Use memctx to perform in-memory commits via localrepo.commitctx().
2182 """Use memctx to perform in-memory commits via localrepo.commitctx().
2187
2183
2188 Revision information is supplied at initialization time while
2184 Revision information is supplied at initialization time while
2189 related files data and is made available through a callback
2185 related files data and is made available through a callback
2190 mechanism. 'repo' is the current localrepo, 'parents' is a
2186 mechanism. 'repo' is the current localrepo, 'parents' is a
2191 sequence of two parent revisions identifiers (pass None for every
2187 sequence of two parent revisions identifiers (pass None for every
2192 missing parent), 'text' is the commit message and 'files' lists
2188 missing parent), 'text' is the commit message and 'files' lists
2193 names of files touched by the revision (normalized and relative to
2189 names of files touched by the revision (normalized and relative to
2194 repository root).
2190 repository root).
2195
2191
2196 filectxfn(repo, memctx, path) is a callable receiving the
2192 filectxfn(repo, memctx, path) is a callable receiving the
2197 repository, the current memctx object and the normalized path of
2193 repository, the current memctx object and the normalized path of
2198 requested file, relative to repository root. It is fired by the
2194 requested file, relative to repository root. It is fired by the
2199 commit function for every file in 'files', but calls order is
2195 commit function for every file in 'files', but calls order is
2200 undefined. If the file is available in the revision being
2196 undefined. If the file is available in the revision being
2201 committed (updated or added), filectxfn returns a memfilectx
2197 committed (updated or added), filectxfn returns a memfilectx
2202 object. If the file was removed, filectxfn return None for recent
2198 object. If the file was removed, filectxfn return None for recent
2203 Mercurial. Moved files are represented by marking the source file
2199 Mercurial. Moved files are represented by marking the source file
2204 removed and the new file added with copy information (see
2200 removed and the new file added with copy information (see
2205 memfilectx).
2201 memfilectx).
2206
2202
2207 user receives the committer name and defaults to current
2203 user receives the committer name and defaults to current
2208 repository username, date is the commit date in any format
2204 repository username, date is the commit date in any format
2209 supported by dateutil.parsedate() and defaults to current date, extra
2205 supported by dateutil.parsedate() and defaults to current date, extra
2210 is a dictionary of metadata or is left empty.
2206 is a dictionary of metadata or is left empty.
2211 """
2207 """
2212
2208
2213 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2209 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2214 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2210 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2215 # this field to determine what to do in filectxfn.
2211 # this field to determine what to do in filectxfn.
2216 _returnnoneformissingfiles = True
2212 _returnnoneformissingfiles = True
2217
2213
2218 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2214 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2219 date=None, extra=None, branch=None, editor=False):
2215 date=None, extra=None, branch=None, editor=False):
2220 super(memctx, self).__init__(repo, text, user, date, extra)
2216 super(memctx, self).__init__(repo, text, user, date, extra)
2221 self._rev = None
2217 self._rev = None
2222 self._node = None
2218 self._node = None
2223 parents = [(p or nullid) for p in parents]
2219 parents = [(p or nullid) for p in parents]
2224 p1, p2 = parents
2220 p1, p2 = parents
2225 self._parents = [self._repo[p] for p in (p1, p2)]
2221 self._parents = [self._repo[p] for p in (p1, p2)]
2226 files = sorted(set(files))
2222 files = sorted(set(files))
2227 self._files = files
2223 self._files = files
2228 if branch is not None:
2224 if branch is not None:
2229 self._extra['branch'] = encoding.fromlocal(branch)
2225 self._extra['branch'] = encoding.fromlocal(branch)
2230 self.substate = {}
2226 self.substate = {}
2231
2227
2232 if isinstance(filectxfn, patch.filestore):
2228 if isinstance(filectxfn, patch.filestore):
2233 filectxfn = memfilefrompatch(filectxfn)
2229 filectxfn = memfilefrompatch(filectxfn)
2234 elif not callable(filectxfn):
2230 elif not callable(filectxfn):
2235 # if store is not callable, wrap it in a function
2231 # if store is not callable, wrap it in a function
2236 filectxfn = memfilefromctx(filectxfn)
2232 filectxfn = memfilefromctx(filectxfn)
2237
2233
2238 # memoizing increases performance for e.g. vcs convert scenarios.
2234 # memoizing increases performance for e.g. vcs convert scenarios.
2239 self._filectxfn = makecachingfilectxfn(filectxfn)
2235 self._filectxfn = makecachingfilectxfn(filectxfn)
2240
2236
2241 if editor:
2237 if editor:
2242 self._text = editor(self._repo, self, [])
2238 self._text = editor(self._repo, self, [])
2243 self._repo.savecommitmessage(self._text)
2239 self._repo.savecommitmessage(self._text)
2244
2240
2245 def filectx(self, path, filelog=None):
2241 def filectx(self, path, filelog=None):
2246 """get a file context from the working directory
2242 """get a file context from the working directory
2247
2243
2248 Returns None if file doesn't exist and should be removed."""
2244 Returns None if file doesn't exist and should be removed."""
2249 return self._filectxfn(self._repo, self, path)
2245 return self._filectxfn(self._repo, self, path)
2250
2246
2251 def commit(self):
2247 def commit(self):
2252 """commit context to the repo"""
2248 """commit context to the repo"""
2253 return self._repo.commitctx(self)
2249 return self._repo.commitctx(self)
2254
2250
2255 @propertycache
2251 @propertycache
2256 def _manifest(self):
2252 def _manifest(self):
2257 """generate a manifest based on the return values of filectxfn"""
2253 """generate a manifest based on the return values of filectxfn"""
2258
2254
2259 # keep this simple for now; just worry about p1
2255 # keep this simple for now; just worry about p1
2260 pctx = self._parents[0]
2256 pctx = self._parents[0]
2261 man = pctx.manifest().copy()
2257 man = pctx.manifest().copy()
2262
2258
2263 for f in self._status.modified:
2259 for f in self._status.modified:
2264 p1node = nullid
2260 p1node = nullid
2265 p2node = nullid
2261 p2node = nullid
2266 p = pctx[f].parents() # if file isn't in pctx, check p2?
2262 p = pctx[f].parents() # if file isn't in pctx, check p2?
2267 if len(p) > 0:
2263 if len(p) > 0:
2268 p1node = p[0].filenode()
2264 p1node = p[0].filenode()
2269 if len(p) > 1:
2265 if len(p) > 1:
2270 p2node = p[1].filenode()
2266 p2node = p[1].filenode()
2271 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2267 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2272
2268
2273 for f in self._status.added:
2269 for f in self._status.added:
2274 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2270 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2275
2271
2276 for f in self._status.removed:
2272 for f in self._status.removed:
2277 if f in man:
2273 if f in man:
2278 del man[f]
2274 del man[f]
2279
2275
2280 return man
2276 return man
2281
2277
2282 @propertycache
2278 @propertycache
2283 def _status(self):
2279 def _status(self):
2284 """Calculate exact status from ``files`` specified at construction
2280 """Calculate exact status from ``files`` specified at construction
2285 """
2281 """
2286 man1 = self.p1().manifest()
2282 man1 = self.p1().manifest()
2287 p2 = self._parents[1]
2283 p2 = self._parents[1]
2288 # "1 < len(self._parents)" can't be used for checking
2284 # "1 < len(self._parents)" can't be used for checking
2289 # existence of the 2nd parent, because "memctx._parents" is
2285 # existence of the 2nd parent, because "memctx._parents" is
2290 # explicitly initialized by the list, of which length is 2.
2286 # explicitly initialized by the list, of which length is 2.
2291 if p2.node() != nullid:
2287 if p2.node() != nullid:
2292 man2 = p2.manifest()
2288 man2 = p2.manifest()
2293 managing = lambda f: f in man1 or f in man2
2289 managing = lambda f: f in man1 or f in man2
2294 else:
2290 else:
2295 managing = lambda f: f in man1
2291 managing = lambda f: f in man1
2296
2292
2297 modified, added, removed = [], [], []
2293 modified, added, removed = [], [], []
2298 for f in self._files:
2294 for f in self._files:
2299 if not managing(f):
2295 if not managing(f):
2300 added.append(f)
2296 added.append(f)
2301 elif self[f]:
2297 elif self[f]:
2302 modified.append(f)
2298 modified.append(f)
2303 else:
2299 else:
2304 removed.append(f)
2300 removed.append(f)
2305
2301
2306 return scmutil.status(modified, added, removed, [], [], [], [])
2302 return scmutil.status(modified, added, removed, [], [], [], [])
2307
2303
2308 class memfilectx(committablefilectx):
2304 class memfilectx(committablefilectx):
2309 """memfilectx represents an in-memory file to commit.
2305 """memfilectx represents an in-memory file to commit.
2310
2306
2311 See memctx and committablefilectx for more details.
2307 See memctx and committablefilectx for more details.
2312 """
2308 """
2313 def __init__(self, repo, changectx, path, data, islink=False,
2309 def __init__(self, repo, changectx, path, data, islink=False,
2314 isexec=False, copied=None):
2310 isexec=False, copied=None):
2315 """
2311 """
2316 path is the normalized file path relative to repository root.
2312 path is the normalized file path relative to repository root.
2317 data is the file content as a string.
2313 data is the file content as a string.
2318 islink is True if the file is a symbolic link.
2314 islink is True if the file is a symbolic link.
2319 isexec is True if the file is executable.
2315 isexec is True if the file is executable.
2320 copied is the source file path if current file was copied in the
2316 copied is the source file path if current file was copied in the
2321 revision being committed, or None."""
2317 revision being committed, or None."""
2322 super(memfilectx, self).__init__(repo, path, None, changectx)
2318 super(memfilectx, self).__init__(repo, path, None, changectx)
2323 self._data = data
2319 self._data = data
2324 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2320 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2325 self._copied = None
2321 self._copied = None
2326 if copied:
2322 if copied:
2327 self._copied = (copied, nullid)
2323 self._copied = (copied, nullid)
2328
2324
2329 def data(self):
2325 def data(self):
2330 return self._data
2326 return self._data
2331
2327
2332 def remove(self, ignoremissing=False):
2328 def remove(self, ignoremissing=False):
2333 """wraps unlink for a repo's working directory"""
2329 """wraps unlink for a repo's working directory"""
2334 # need to figure out what to do here
2330 # need to figure out what to do here
2335 del self._changectx[self._path]
2331 del self._changectx[self._path]
2336
2332
2337 def write(self, data, flags, **kwargs):
2333 def write(self, data, flags, **kwargs):
2338 """wraps repo.wwrite"""
2334 """wraps repo.wwrite"""
2339 self._data = data
2335 self._data = data
2340
2336
2341 class overlayfilectx(committablefilectx):
2337 class overlayfilectx(committablefilectx):
2342 """Like memfilectx but take an original filectx and optional parameters to
2338 """Like memfilectx but take an original filectx and optional parameters to
2343 override parts of it. This is useful when fctx.data() is expensive (i.e.
2339 override parts of it. This is useful when fctx.data() is expensive (i.e.
2344 flag processor is expensive) and raw data, flags, and filenode could be
2340 flag processor is expensive) and raw data, flags, and filenode could be
2345 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2341 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2346 """
2342 """
2347
2343
2348 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2344 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2349 copied=None, ctx=None):
2345 copied=None, ctx=None):
2350 """originalfctx: filecontext to duplicate
2346 """originalfctx: filecontext to duplicate
2351
2347
2352 datafunc: None or a function to override data (file content). It is a
2348 datafunc: None or a function to override data (file content). It is a
2353 function to be lazy. path, flags, copied, ctx: None or overridden value
2349 function to be lazy. path, flags, copied, ctx: None or overridden value
2354
2350
2355 copied could be (path, rev), or False. copied could also be just path,
2351 copied could be (path, rev), or False. copied could also be just path,
2356 and will be converted to (path, nullid). This simplifies some callers.
2352 and will be converted to (path, nullid). This simplifies some callers.
2357 """
2353 """
2358
2354
2359 if path is None:
2355 if path is None:
2360 path = originalfctx.path()
2356 path = originalfctx.path()
2361 if ctx is None:
2357 if ctx is None:
2362 ctx = originalfctx.changectx()
2358 ctx = originalfctx.changectx()
2363 ctxmatch = lambda: True
2359 ctxmatch = lambda: True
2364 else:
2360 else:
2365 ctxmatch = lambda: ctx == originalfctx.changectx()
2361 ctxmatch = lambda: ctx == originalfctx.changectx()
2366
2362
2367 repo = originalfctx.repo()
2363 repo = originalfctx.repo()
2368 flog = originalfctx.filelog()
2364 flog = originalfctx.filelog()
2369 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2365 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2370
2366
2371 if copied is None:
2367 if copied is None:
2372 copied = originalfctx.renamed()
2368 copied = originalfctx.renamed()
2373 copiedmatch = lambda: True
2369 copiedmatch = lambda: True
2374 else:
2370 else:
2375 if copied and not isinstance(copied, tuple):
2371 if copied and not isinstance(copied, tuple):
2376 # repo._filecommit will recalculate copyrev so nullid is okay
2372 # repo._filecommit will recalculate copyrev so nullid is okay
2377 copied = (copied, nullid)
2373 copied = (copied, nullid)
2378 copiedmatch = lambda: copied == originalfctx.renamed()
2374 copiedmatch = lambda: copied == originalfctx.renamed()
2379
2375
2380 # When data, copied (could affect data), ctx (could affect filelog
2376 # When data, copied (could affect data), ctx (could affect filelog
2381 # parents) are not overridden, rawdata, rawflags, and filenode may be
2377 # parents) are not overridden, rawdata, rawflags, and filenode may be
2382 # reused (repo._filecommit should double check filelog parents).
2378 # reused (repo._filecommit should double check filelog parents).
2383 #
2379 #
2384 # path, flags are not hashed in filelog (but in manifestlog) so they do
2380 # path, flags are not hashed in filelog (but in manifestlog) so they do
2385 # not affect reusable here.
2381 # not affect reusable here.
2386 #
2382 #
2387 # If ctx or copied is overridden to a same value with originalfctx,
2383 # If ctx or copied is overridden to a same value with originalfctx,
2388 # still consider it's reusable. originalfctx.renamed() may be a bit
2384 # still consider it's reusable. originalfctx.renamed() may be a bit
2389 # expensive so it's not called unless necessary. Assuming datafunc is
2385 # expensive so it's not called unless necessary. Assuming datafunc is
2390 # always expensive, do not call it for this "reusable" test.
2386 # always expensive, do not call it for this "reusable" test.
2391 reusable = datafunc is None and ctxmatch() and copiedmatch()
2387 reusable = datafunc is None and ctxmatch() and copiedmatch()
2392
2388
2393 if datafunc is None:
2389 if datafunc is None:
2394 datafunc = originalfctx.data
2390 datafunc = originalfctx.data
2395 if flags is None:
2391 if flags is None:
2396 flags = originalfctx.flags()
2392 flags = originalfctx.flags()
2397
2393
2398 self._datafunc = datafunc
2394 self._datafunc = datafunc
2399 self._flags = flags
2395 self._flags = flags
2400 self._copied = copied
2396 self._copied = copied
2401
2397
2402 if reusable:
2398 if reusable:
2403 # copy extra fields from originalfctx
2399 # copy extra fields from originalfctx
2404 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2400 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2405 for attr_ in attrs:
2401 for attr_ in attrs:
2406 if util.safehasattr(originalfctx, attr_):
2402 if util.safehasattr(originalfctx, attr_):
2407 setattr(self, attr_, getattr(originalfctx, attr_))
2403 setattr(self, attr_, getattr(originalfctx, attr_))
2408
2404
2409 def data(self):
2405 def data(self):
2410 return self._datafunc()
2406 return self._datafunc()
2411
2407
2412 class metadataonlyctx(committablectx):
2408 class metadataonlyctx(committablectx):
2413 """Like memctx but it's reusing the manifest of different commit.
2409 """Like memctx but it's reusing the manifest of different commit.
2414 Intended to be used by lightweight operations that are creating
2410 Intended to be used by lightweight operations that are creating
2415 metadata-only changes.
2411 metadata-only changes.
2416
2412
2417 Revision information is supplied at initialization time. 'repo' is the
2413 Revision information is supplied at initialization time. 'repo' is the
2418 current localrepo, 'ctx' is original revision which manifest we're reuisng
2414 current localrepo, 'ctx' is original revision which manifest we're reuisng
2419 'parents' is a sequence of two parent revisions identifiers (pass None for
2415 'parents' is a sequence of two parent revisions identifiers (pass None for
2420 every missing parent), 'text' is the commit.
2416 every missing parent), 'text' is the commit.
2421
2417
2422 user receives the committer name and defaults to current repository
2418 user receives the committer name and defaults to current repository
2423 username, date is the commit date in any format supported by
2419 username, date is the commit date in any format supported by
2424 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2420 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2425 metadata or is left empty.
2421 metadata or is left empty.
2426 """
2422 """
2427 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2423 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2428 date=None, extra=None, editor=False):
2424 date=None, extra=None, editor=False):
2429 if text is None:
2425 if text is None:
2430 text = originalctx.description()
2426 text = originalctx.description()
2431 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2427 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2432 self._rev = None
2428 self._rev = None
2433 self._node = None
2429 self._node = None
2434 self._originalctx = originalctx
2430 self._originalctx = originalctx
2435 self._manifestnode = originalctx.manifestnode()
2431 self._manifestnode = originalctx.manifestnode()
2436 if parents is None:
2432 if parents is None:
2437 parents = originalctx.parents()
2433 parents = originalctx.parents()
2438 else:
2434 else:
2439 parents = [repo[p] for p in parents if p is not None]
2435 parents = [repo[p] for p in parents if p is not None]
2440 parents = parents[:]
2436 parents = parents[:]
2441 while len(parents) < 2:
2437 while len(parents) < 2:
2442 parents.append(repo[nullid])
2438 parents.append(repo[nullid])
2443 p1, p2 = self._parents = parents
2439 p1, p2 = self._parents = parents
2444
2440
2445 # sanity check to ensure that the reused manifest parents are
2441 # sanity check to ensure that the reused manifest parents are
2446 # manifests of our commit parents
2442 # manifests of our commit parents
2447 mp1, mp2 = self.manifestctx().parents
2443 mp1, mp2 = self.manifestctx().parents
2448 if p1 != nullid and p1.manifestnode() != mp1:
2444 if p1 != nullid and p1.manifestnode() != mp1:
2449 raise RuntimeError('can\'t reuse the manifest: '
2445 raise RuntimeError('can\'t reuse the manifest: '
2450 'its p1 doesn\'t match the new ctx p1')
2446 'its p1 doesn\'t match the new ctx p1')
2451 if p2 != nullid and p2.manifestnode() != mp2:
2447 if p2 != nullid and p2.manifestnode() != mp2:
2452 raise RuntimeError('can\'t reuse the manifest: '
2448 raise RuntimeError('can\'t reuse the manifest: '
2453 'its p2 doesn\'t match the new ctx p2')
2449 'its p2 doesn\'t match the new ctx p2')
2454
2450
2455 self._files = originalctx.files()
2451 self._files = originalctx.files()
2456 self.substate = {}
2452 self.substate = {}
2457
2453
2458 if editor:
2454 if editor:
2459 self._text = editor(self._repo, self, [])
2455 self._text = editor(self._repo, self, [])
2460 self._repo.savecommitmessage(self._text)
2456 self._repo.savecommitmessage(self._text)
2461
2457
2462 def manifestnode(self):
2458 def manifestnode(self):
2463 return self._manifestnode
2459 return self._manifestnode
2464
2460
2465 @property
2461 @property
2466 def _manifestctx(self):
2462 def _manifestctx(self):
2467 return self._repo.manifestlog[self._manifestnode]
2463 return self._repo.manifestlog[self._manifestnode]
2468
2464
2469 def filectx(self, path, filelog=None):
2465 def filectx(self, path, filelog=None):
2470 return self._originalctx.filectx(path, filelog=filelog)
2466 return self._originalctx.filectx(path, filelog=filelog)
2471
2467
    def commit(self):
        """commit context to the repo"""
        # Delegates to self._repo.commitctx(); returns whatever it returns
        # (presumably the new changeset node -- confirm in localrepo).
        return self._repo.commitctx(self)
2475
2471
    @property
    def _manifest(self):
        # Metadata-only rewrite: file contents are unchanged, so the
        # original context's manifest is reused as-is.
        return self._originalctx.manifest()
2479
2475
2480 @propertycache
2476 @propertycache
2481 def _status(self):
2477 def _status(self):
2482 """Calculate exact status from ``files`` specified in the ``origctx``
2478 """Calculate exact status from ``files`` specified in the ``origctx``
2483 and parents manifests.
2479 and parents manifests.
2484 """
2480 """
2485 man1 = self.p1().manifest()
2481 man1 = self.p1().manifest()
2486 p2 = self._parents[1]
2482 p2 = self._parents[1]
2487 # "1 < len(self._parents)" can't be used for checking
2483 # "1 < len(self._parents)" can't be used for checking
2488 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2484 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2489 # explicitly initialized by the list, of which length is 2.
2485 # explicitly initialized by the list, of which length is 2.
2490 if p2.node() != nullid:
2486 if p2.node() != nullid:
2491 man2 = p2.manifest()
2487 man2 = p2.manifest()
2492 managing = lambda f: f in man1 or f in man2
2488 managing = lambda f: f in man1 or f in man2
2493 else:
2489 else:
2494 managing = lambda f: f in man1
2490 managing = lambda f: f in man1
2495
2491
2496 modified, added, removed = [], [], []
2492 modified, added, removed = [], [], []
2497 for f in self._files:
2493 for f in self._files:
2498 if not managing(f):
2494 if not managing(f):
2499 added.append(f)
2495 added.append(f)
2500 elif f in self:
2496 elif f in self:
2501 modified.append(f)
2497 modified.append(f)
2502 else:
2498 else:
2503 removed.append(f)
2499 removed.append(f)
2504
2500
2505 return scmutil.status(modified, added, removed, [], [], [], [])
2501 return scmutil.status(modified, added, removed, [], [], [], [])
2506
2502
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        """Return True if this file's content differs from ``fctx``'s.

        Note the convention is the opposite of ``filecmp.cmp`` (which
        returns True when the files are the same), hence the ``not`` below.
        """
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        # An arbitrary on-disk file carries no exec/symlink flag information.
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        # Outside a working directory no filters apply: the raw bytes on
        # disk are the decoded data.
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        """Write ``data`` (bytes) to the file; ``flags`` must be empty."""
        assert not flags
        # Open in binary mode for consistency with data()/decodeddata(),
        # which read in binary.  Text mode ("w") would newline-translate on
        # Windows, corrupting round-trips through the "rb" readers above,
        # and rejects bytes data on Python 3.
        with open(self._path, "wb") as f:
            f.write(data)
General Comments 0
You need to be logged in to leave comments. Login now