##// END OF EJS Templates
context: add obsolete() method to basefilectx...
av6 -
r35087:a9454beb default
parent child Browse files
Show More
@@ -1,2604 +1,2606
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .thirdparty import (
29 from .thirdparty import (
30 attr,
30 attr,
31 )
31 )
32 from . import (
32 from . import (
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 mdiff,
37 mdiff,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 repoview,
43 repoview,
44 revlog,
44 revlog,
45 scmutil,
45 scmutil,
46 sparse,
46 sparse,
47 subrepo,
47 subrepo,
48 util,
48 util,
49 )
49 )
50
50
# Shorthand for the lazily-computed, cached property decorator used
# throughout this module.
propertycache = util.propertycache

# Predicate that fires when a string contains any character outside the
# \x21-\x7f range; used below to detect binary node ids passed as strings.
nonascii = re.compile(r'[^\x21-\x7f]').search
54
54
class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        # Passing an existing context as changeid returns it unchanged, so
        # basectx(repo, otherctx) acts as an identity conversion.
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        # Default to the null revision; subclasses overwrite these once the
        # actual changeid has been resolved in their __init__.
        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __bytes__(self):
        # Short (abbreviated hex) form of the node hash.
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # Contexts are equal when they are the same concrete type and refer
        # to the same revision number.
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        # 'path in ctx' tests membership in this context's manifest.
        return key in self._manifest

    def __getitem__(self, key):
        # 'ctx[path]' returns a file context for that path.
        return self.filectx(key)

    def __iter__(self):
        # Iterating a context yields the file names of its manifest.
        return iter(self._manifest)
102
102
    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            # Files already reported as deleted take precedence over any
            # manifest-level difference.
            if fn in deletedset:
                continue
            # A None value means both sides matched (only emitted when the
            # diff was asked for clean files).
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirnodes:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)
172
172
    @propertycache
    def substate(self):
        # Cached subrepository state for this context.
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        """Return the recorded revision of the subrepo at subpath."""
        return self.substate[subpath][1]

    def rev(self):
        # Revision number of this context.
        return self._rev
    def node(self):
        # Binary node id of this context.
        return self._node
    def hex(self):
        # Full hex form of the node id.
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        # Human-readable phase name (indexed by the numeric phase).
        return phases.phasenames[self.phase()]
    def mutable(self):
        # Any phase beyond public is considered mutable.
        return self.phase() > phases.public

    def getfileset(self, expr):
        """Evaluate a fileset expression against this context."""
        return fileset.getfileset(self, expr)
199
199
    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        # Deprecated alias kept for backward compatibility; warns and
        # delegates to orphan().
        msg = ("'context.unstable' is deprecated, "
               "use 'context.orphan'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.orphan()

    def orphan(self):
        """True if the changeset is not obsolete but its ancestors are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def bumped(self):
        # Deprecated alias kept for backward compatibility; warns and
        # delegates to phasedivergent().
        msg = ("'context.bumped' is deprecated, "
               "use 'context.phasedivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.phasedivergent()

    def phasedivergent(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def divergent(self):
        # Deprecated alias kept for backward compatibility; warns and
        # delegates to contentdivergent().
        msg = ("'context.divergent' is deprecated, "
               "use 'context.contentdivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.contentdivergent()

    def contentdivergent(self):
        """Is a successor of a changeset with multiple possible successor sets

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def troubled(self):
        # Deprecated alias kept for backward compatibility; warns and
        # delegates to isunstable().
        msg = ("'context.troubled' is deprecated, "
               "use 'context.isunstable'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.isunstable()

    def isunstable(self):
        """True if the changeset is either orphan, phase-divergent or
        content-divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def troubles(self):
        """Keep the old version around in order to avoid breaking extensions
        about different return values.
        """
        msg = ("'context.troubles' is deprecated, "
               "use 'context.instabilities'")
        self._repo.ui.deprecwarn(msg, '4.4')

        # Legacy spellings of the instability names.
        troubles = []
        if self.orphan():
            troubles.append('orphan')
        if self.phasedivergent():
            troubles.append('bumped')
        if self.contentdivergent():
            troubles.append('divergent')
        return troubles

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append('orphan')
        if self.phasedivergent():
            instabilities.append('phase-divergent')
        if self.contentdivergent():
            instabilities.append('content-divergent')
        return instabilities
287
287
    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        # First parent context; always present.
        return self._parents[0]

    def p2(self):
        # Second parent context, or the null context when there is none.
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        """Return (filenode, flags) for path.

        Raises ManifestLookupError when the path is not present in this
        context's manifest."""
        # Fast path: the full manifest has already been materialized.
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        # Cheaper path: consult the manifest delta when it is already cached
        # or the file is known to be touched by this changeset.
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        # Fall back to a targeted find() through the manifest log, avoiding
        # a full manifest build.
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        # Filenode component of _fileinfo().
        return self._fileinfo(path)[0]

    def flags(self, path):
        # Flags component of _fileinfo(); missing paths yield ''.
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''
329
329
    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        # Null (empty) subrepo for path, anchored at parent context pctx.
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        """Build a matcher rooted at the repository for the given patterns."""
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        # Default to diffing against the first parent.
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        # Directories present in this context's manifest.
        return self._manifest.dirs()

    def hasdir(self, dir):
        # True if dir exists in this context's manifest.
        return self._manifest.hasdir(dir)
365
365
    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                # Prefix each subrepo file with its subrepo path and merge
                # it into the corresponding top-level status list.
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        # Sort every status list for stable output.
        for l in r:
            l.sort()

        return r
429
429
def _filterederror(repo, changeid):
    """Return a FilteredRepoLookupError describing a filtered changeid.

    Kept as a standalone function so extensions (eg: evolve) can wrap it
    to experiment with various message variants."""
    filtername = repo.filtername
    if filtername.startswith('visible'):
        # Hidden-by-obsolescence case: point the user at --hidden.
        return error.FilteredRepoLookupError(
            _("hidden revision '%s'") % changeid,
            hint=_('use --hidden to access hidden revisions'))
    # Generic repoview filter: name the subset that excluded the revision.
    message = _("filtered revision '%s' (not in '%s' subset)") % (
        changeid, filtername)
    return error.FilteredRepoLookupError(message)
442
442
class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        # The empty changeid means the first parent of the working directory.
        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            # Plain revision number.
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            # Python 2 long: normalize to a string and fall through to the
            # numeric-string handling below.
            if not pycompat.ispy3 and isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if (changeid == '.'
                or repo.local() and changeid == repo.dirstate.p1()):
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            # 20 bytes: likely a binary node id.
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            # Numeric revision given as a string (possibly negative,
            # counted from the end of the changelog).
            try:
                r = int(changeid)
                if '%d' % r != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l and r != wdirrev:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            # 40 characters: likely a full hex node id.
            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            # Last resort: hex-prefix match against the unfiltered changelog.
            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if (repo.local()
                and changeid in repo.unfiltered().dirstate.parents()):
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                # Render binary node ids as hex for the error message below.
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            # The revision exists but is filtered out of this repoview;
            # surface a dedicated, more helpful error.
            raise _filterederror(repo, changeid)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
556
556
557 def __hash__(self):
557 def __hash__(self):
558 try:
558 try:
559 return hash(self._rev)
559 return hash(self._rev)
560 except AttributeError:
560 except AttributeError:
561 return id(self)
561 return id(self)
562
562
563 def __nonzero__(self):
563 def __nonzero__(self):
564 return self._rev != nullrev
564 return self._rev != nullrev
565
565
566 __bool__ = __nonzero__
566 __bool__ = __nonzero__
567
567
568 @propertycache
568 @propertycache
569 def _changeset(self):
569 def _changeset(self):
570 return self._repo.changelog.changelogrevision(self.rev())
570 return self._repo.changelog.changelogrevision(self.rev())
571
571
572 @propertycache
572 @propertycache
573 def _manifest(self):
573 def _manifest(self):
574 return self._manifestctx.read()
574 return self._manifestctx.read()
575
575
576 @property
576 @property
577 def _manifestctx(self):
577 def _manifestctx(self):
578 return self._repo.manifestlog[self._changeset.manifest]
578 return self._repo.manifestlog[self._changeset.manifest]
579
579
580 @propertycache
580 @propertycache
581 def _manifestdelta(self):
581 def _manifestdelta(self):
582 return self._manifestctx.readdelta()
582 return self._manifestctx.readdelta()
583
583
584 @propertycache
584 @propertycache
585 def _parents(self):
585 def _parents(self):
586 repo = self._repo
586 repo = self._repo
587 p1, p2 = repo.changelog.parentrevs(self._rev)
587 p1, p2 = repo.changelog.parentrevs(self._rev)
588 if p2 == nullrev:
588 if p2 == nullrev:
589 return [changectx(repo, p1)]
589 return [changectx(repo, p1)]
590 return [changectx(repo, p1), changectx(repo, p2)]
590 return [changectx(repo, p1), changectx(repo, p2)]
591
591
592 def changeset(self):
592 def changeset(self):
593 c = self._changeset
593 c = self._changeset
594 return (
594 return (
595 c.manifest,
595 c.manifest,
596 c.user,
596 c.user,
597 c.date,
597 c.date,
598 c.files,
598 c.files,
599 c.description,
599 c.description,
600 c.extra,
600 c.extra,
601 )
601 )
602 def manifestnode(self):
602 def manifestnode(self):
603 return self._changeset.manifest
603 return self._changeset.manifest
604
604
605 def user(self):
605 def user(self):
606 return self._changeset.user
606 return self._changeset.user
607 def date(self):
607 def date(self):
608 return self._changeset.date
608 return self._changeset.date
609 def files(self):
609 def files(self):
610 return self._changeset.files
610 return self._changeset.files
611 def description(self):
611 def description(self):
612 return self._changeset.description
612 return self._changeset.description
613 def branch(self):
613 def branch(self):
614 return encoding.tolocal(self._changeset.extra.get("branch"))
614 return encoding.tolocal(self._changeset.extra.get("branch"))
615 def closesbranch(self):
615 def closesbranch(self):
616 return 'close' in self._changeset.extra
616 return 'close' in self._changeset.extra
617 def extra(self):
617 def extra(self):
618 return self._changeset.extra
618 return self._changeset.extra
619 def tags(self):
619 def tags(self):
620 return self._repo.nodetags(self._node)
620 return self._repo.nodetags(self._node)
621 def bookmarks(self):
621 def bookmarks(self):
622 return self._repo.nodebookmarks(self._node)
622 return self._repo.nodebookmarks(self._node)
623 def phase(self):
623 def phase(self):
624 return self._repo._phasecache.phase(self._repo, self._rev)
624 return self._repo._phasecache.phase(self._repo, self._rev)
625 def hidden(self):
625 def hidden(self):
626 return self._rev in repoview.filterrevs(self._repo, 'visible')
626 return self._rev in repoview.filterrevs(self._repo, 'visible')
627
627
628 def isinmemory(self):
628 def isinmemory(self):
629 return False
629 return False
630
630
631 def children(self):
631 def children(self):
632 """return contexts for each child changeset"""
632 """return contexts for each child changeset"""
633 c = self._repo.changelog.children(self._node)
633 c = self._repo.changelog.children(self._node)
634 return [changectx(self._repo, x) for x in c]
634 return [changectx(self._repo, x) for x in c]
635
635
636 def ancestors(self):
636 def ancestors(self):
637 for a in self._repo.changelog.ancestors([self._rev]):
637 for a in self._repo.changelog.ancestors([self._rev]):
638 yield changectx(self._repo, a)
638 yield changectx(self._repo, a)
639
639
640 def descendants(self):
640 def descendants(self):
641 for d in self._repo.changelog.descendants([self._rev]):
641 for d in self._repo.changelog.descendants([self._rev]):
642 yield changectx(self._repo, d)
642 yield changectx(self._repo, d)
643
643
644 def filectx(self, path, fileid=None, filelog=None):
644 def filectx(self, path, fileid=None, filelog=None):
645 """get a file context from this changeset"""
645 """get a file context from this changeset"""
646 if fileid is None:
646 if fileid is None:
647 fileid = self.filenode(path)
647 fileid = self.filenode(path)
648 return filectx(self._repo, path, fileid=fileid,
648 return filectx(self._repo, path, fileid=fileid,
649 changectx=self, filelog=filelog)
649 changectx=self, filelog=filelog)
650
650
651 def ancestor(self, c2, warn=False):
651 def ancestor(self, c2, warn=False):
652 """return the "best" ancestor context of self and c2
652 """return the "best" ancestor context of self and c2
653
653
654 If there are multiple candidates, it will show a message and check
654 If there are multiple candidates, it will show a message and check
655 merge.preferancestor configuration before falling back to the
655 merge.preferancestor configuration before falling back to the
656 revlog ancestor."""
656 revlog ancestor."""
657 # deal with workingctxs
657 # deal with workingctxs
658 n2 = c2._node
658 n2 = c2._node
659 if n2 is None:
659 if n2 is None:
660 n2 = c2._parents[0]._node
660 n2 = c2._parents[0]._node
661 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
661 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
662 if not cahs:
662 if not cahs:
663 anc = nullid
663 anc = nullid
664 elif len(cahs) == 1:
664 elif len(cahs) == 1:
665 anc = cahs[0]
665 anc = cahs[0]
666 else:
666 else:
667 # experimental config: merge.preferancestor
667 # experimental config: merge.preferancestor
668 for r in self._repo.ui.configlist('merge', 'preferancestor'):
668 for r in self._repo.ui.configlist('merge', 'preferancestor'):
669 try:
669 try:
670 ctx = changectx(self._repo, r)
670 ctx = changectx(self._repo, r)
671 except error.RepoLookupError:
671 except error.RepoLookupError:
672 continue
672 continue
673 anc = ctx.node()
673 anc = ctx.node()
674 if anc in cahs:
674 if anc in cahs:
675 break
675 break
676 else:
676 else:
677 anc = self._repo.changelog.ancestor(self._node, n2)
677 anc = self._repo.changelog.ancestor(self._node, n2)
678 if warn:
678 if warn:
679 self._repo.ui.status(
679 self._repo.ui.status(
680 (_("note: using %s as ancestor of %s and %s\n") %
680 (_("note: using %s as ancestor of %s and %s\n") %
681 (short(anc), short(self._node), short(n2))) +
681 (short(anc), short(self._node), short(n2))) +
682 ''.join(_(" alternatively, use --config "
682 ''.join(_(" alternatively, use --config "
683 "merge.preferancestor=%s\n") %
683 "merge.preferancestor=%s\n") %
684 short(n) for n in sorted(cahs) if n != anc))
684 short(n) for n in sorted(cahs) if n != anc))
685 return changectx(self._repo, anc)
685 return changectx(self._repo, anc)
686
686
687 def descendant(self, other):
687 def descendant(self, other):
688 """True if other is descendant of this changeset"""
688 """True if other is descendant of this changeset"""
689 return self._repo.changelog.descendant(self._rev, other._rev)
689 return self._repo.changelog.descendant(self._rev, other._rev)
690
690
691 def walk(self, match):
691 def walk(self, match):
692 '''Generates matching file names.'''
692 '''Generates matching file names.'''
693
693
694 # Wrap match.bad method to have message with nodeid
694 # Wrap match.bad method to have message with nodeid
695 def bad(fn, msg):
695 def bad(fn, msg):
696 # The manifest doesn't know about subrepos, so don't complain about
696 # The manifest doesn't know about subrepos, so don't complain about
697 # paths into valid subrepos.
697 # paths into valid subrepos.
698 if any(fn == s or fn.startswith(s + '/')
698 if any(fn == s or fn.startswith(s + '/')
699 for s in self.substate):
699 for s in self.substate):
700 return
700 return
701 match.bad(fn, _('no such file in rev %s') % self)
701 match.bad(fn, _('no such file in rev %s') % self)
702
702
703 m = matchmod.badmatch(match, bad)
703 m = matchmod.badmatch(match, bad)
704 return self._manifest.walk(m)
704 return self._manifest.walk(m)
705
705
706 def matches(self, match):
706 def matches(self, match):
707 return self.walk(match)
707 return self.walk(match)
708
708
709 class basefilectx(object):
709 class basefilectx(object):
710 """A filecontext object represents the common logic for its children:
710 """A filecontext object represents the common logic for its children:
711 filectx: read-only access to a filerevision that is already present
711 filectx: read-only access to a filerevision that is already present
712 in the repo,
712 in the repo,
713 workingfilectx: a filecontext that represents files from the working
713 workingfilectx: a filecontext that represents files from the working
714 directory,
714 directory,
715 memfilectx: a filecontext that represents files in-memory,
715 memfilectx: a filecontext that represents files in-memory,
716 overlayfilectx: duplicate another filecontext with some fields overridden.
716 overlayfilectx: duplicate another filecontext with some fields overridden.
717 """
717 """
718 @propertycache
718 @propertycache
719 def _filelog(self):
719 def _filelog(self):
720 return self._repo.file(self._path)
720 return self._repo.file(self._path)
721
721
722 @propertycache
722 @propertycache
723 def _changeid(self):
723 def _changeid(self):
724 if r'_changeid' in self.__dict__:
724 if r'_changeid' in self.__dict__:
725 return self._changeid
725 return self._changeid
726 elif r'_changectx' in self.__dict__:
726 elif r'_changectx' in self.__dict__:
727 return self._changectx.rev()
727 return self._changectx.rev()
728 elif r'_descendantrev' in self.__dict__:
728 elif r'_descendantrev' in self.__dict__:
729 # this file context was created from a revision with a known
729 # this file context was created from a revision with a known
730 # descendant, we can (lazily) correct for linkrev aliases
730 # descendant, we can (lazily) correct for linkrev aliases
731 return self._adjustlinkrev(self._descendantrev)
731 return self._adjustlinkrev(self._descendantrev)
732 else:
732 else:
733 return self._filelog.linkrev(self._filerev)
733 return self._filelog.linkrev(self._filerev)
734
734
735 @propertycache
735 @propertycache
736 def _filenode(self):
736 def _filenode(self):
737 if r'_fileid' in self.__dict__:
737 if r'_fileid' in self.__dict__:
738 return self._filelog.lookup(self._fileid)
738 return self._filelog.lookup(self._fileid)
739 else:
739 else:
740 return self._changectx.filenode(self._path)
740 return self._changectx.filenode(self._path)
741
741
742 @propertycache
742 @propertycache
743 def _filerev(self):
743 def _filerev(self):
744 return self._filelog.rev(self._filenode)
744 return self._filelog.rev(self._filenode)
745
745
746 @propertycache
746 @propertycache
747 def _repopath(self):
747 def _repopath(self):
748 return self._path
748 return self._path
749
749
750 def __nonzero__(self):
750 def __nonzero__(self):
751 try:
751 try:
752 self._filenode
752 self._filenode
753 return True
753 return True
754 except error.LookupError:
754 except error.LookupError:
755 # file is missing
755 # file is missing
756 return False
756 return False
757
757
758 __bool__ = __nonzero__
758 __bool__ = __nonzero__
759
759
760 def __bytes__(self):
760 def __bytes__(self):
761 try:
761 try:
762 return "%s@%s" % (self.path(), self._changectx)
762 return "%s@%s" % (self.path(), self._changectx)
763 except error.LookupError:
763 except error.LookupError:
764 return "%s@???" % self.path()
764 return "%s@???" % self.path()
765
765
766 __str__ = encoding.strmethod(__bytes__)
766 __str__ = encoding.strmethod(__bytes__)
767
767
768 def __repr__(self):
768 def __repr__(self):
769 return "<%s %s>" % (type(self).__name__, str(self))
769 return "<%s %s>" % (type(self).__name__, str(self))
770
770
771 def __hash__(self):
771 def __hash__(self):
772 try:
772 try:
773 return hash((self._path, self._filenode))
773 return hash((self._path, self._filenode))
774 except AttributeError:
774 except AttributeError:
775 return id(self)
775 return id(self)
776
776
777 def __eq__(self, other):
777 def __eq__(self, other):
778 try:
778 try:
779 return (type(self) == type(other) and self._path == other._path
779 return (type(self) == type(other) and self._path == other._path
780 and self._filenode == other._filenode)
780 and self._filenode == other._filenode)
781 except AttributeError:
781 except AttributeError:
782 return False
782 return False
783
783
784 def __ne__(self, other):
784 def __ne__(self, other):
785 return not (self == other)
785 return not (self == other)
786
786
787 def filerev(self):
787 def filerev(self):
788 return self._filerev
788 return self._filerev
789 def filenode(self):
789 def filenode(self):
790 return self._filenode
790 return self._filenode
791 @propertycache
791 @propertycache
792 def _flags(self):
792 def _flags(self):
793 return self._changectx.flags(self._path)
793 return self._changectx.flags(self._path)
794 def flags(self):
794 def flags(self):
795 return self._flags
795 return self._flags
796 def filelog(self):
796 def filelog(self):
797 return self._filelog
797 return self._filelog
798 def rev(self):
798 def rev(self):
799 return self._changeid
799 return self._changeid
800 def linkrev(self):
800 def linkrev(self):
801 return self._filelog.linkrev(self._filerev)
801 return self._filelog.linkrev(self._filerev)
802 def node(self):
802 def node(self):
803 return self._changectx.node()
803 return self._changectx.node()
804 def hex(self):
804 def hex(self):
805 return self._changectx.hex()
805 return self._changectx.hex()
806 def user(self):
806 def user(self):
807 return self._changectx.user()
807 return self._changectx.user()
808 def date(self):
808 def date(self):
809 return self._changectx.date()
809 return self._changectx.date()
810 def files(self):
810 def files(self):
811 return self._changectx.files()
811 return self._changectx.files()
812 def description(self):
812 def description(self):
813 return self._changectx.description()
813 return self._changectx.description()
814 def branch(self):
814 def branch(self):
815 return self._changectx.branch()
815 return self._changectx.branch()
816 def extra(self):
816 def extra(self):
817 return self._changectx.extra()
817 return self._changectx.extra()
818 def phase(self):
818 def phase(self):
819 return self._changectx.phase()
819 return self._changectx.phase()
820 def phasestr(self):
820 def phasestr(self):
821 return self._changectx.phasestr()
821 return self._changectx.phasestr()
822 def obsolete(self):
823 return self._changectx.obsolete()
822 def manifest(self):
824 def manifest(self):
823 return self._changectx.manifest()
825 return self._changectx.manifest()
824 def changectx(self):
826 def changectx(self):
825 return self._changectx
827 return self._changectx
826 def renamed(self):
828 def renamed(self):
827 return self._copied
829 return self._copied
828 def repo(self):
830 def repo(self):
829 return self._repo
831 return self._repo
830 def size(self):
832 def size(self):
831 return len(self.data())
833 return len(self.data())
832
834
833 def path(self):
835 def path(self):
834 return self._path
836 return self._path
835
837
836 def isbinary(self):
838 def isbinary(self):
837 try:
839 try:
838 return util.binary(self.data())
840 return util.binary(self.data())
839 except IOError:
841 except IOError:
840 return False
842 return False
841 def isexec(self):
843 def isexec(self):
842 return 'x' in self.flags()
844 return 'x' in self.flags()
843 def islink(self):
845 def islink(self):
844 return 'l' in self.flags()
846 return 'l' in self.flags()
845
847
846 def isabsent(self):
848 def isabsent(self):
847 """whether this filectx represents a file not in self._changectx
849 """whether this filectx represents a file not in self._changectx
848
850
849 This is mainly for merge code to detect change/delete conflicts. This is
851 This is mainly for merge code to detect change/delete conflicts. This is
850 expected to be True for all subclasses of basectx."""
852 expected to be True for all subclasses of basectx."""
851 return False
853 return False
852
854
853 _customcmp = False
855 _customcmp = False
854 def cmp(self, fctx):
856 def cmp(self, fctx):
855 """compare with other file context
857 """compare with other file context
856
858
857 returns True if different than fctx.
859 returns True if different than fctx.
858 """
860 """
859 if fctx._customcmp:
861 if fctx._customcmp:
860 return fctx.cmp(self)
862 return fctx.cmp(self)
861
863
862 if (fctx._filenode is None
864 if (fctx._filenode is None
863 and (self._repo._encodefilterpats
865 and (self._repo._encodefilterpats
864 # if file data starts with '\1\n', empty metadata block is
866 # if file data starts with '\1\n', empty metadata block is
865 # prepended, which adds 4 bytes to filelog.size().
867 # prepended, which adds 4 bytes to filelog.size().
866 or self.size() - 4 == fctx.size())
868 or self.size() - 4 == fctx.size())
867 or self.size() == fctx.size()):
869 or self.size() == fctx.size()):
868 return self._filelog.cmp(self._filenode, fctx.data())
870 return self._filelog.cmp(self._filenode, fctx.data())
869
871
870 return True
872 return True
871
873
872 def _adjustlinkrev(self, srcrev, inclusive=False):
874 def _adjustlinkrev(self, srcrev, inclusive=False):
873 """return the first ancestor of <srcrev> introducing <fnode>
875 """return the first ancestor of <srcrev> introducing <fnode>
874
876
875 If the linkrev of the file revision does not point to an ancestor of
877 If the linkrev of the file revision does not point to an ancestor of
876 srcrev, we'll walk down the ancestors until we find one introducing
878 srcrev, we'll walk down the ancestors until we find one introducing
877 this file revision.
879 this file revision.
878
880
879 :srcrev: the changeset revision we search ancestors from
881 :srcrev: the changeset revision we search ancestors from
880 :inclusive: if true, the src revision will also be checked
882 :inclusive: if true, the src revision will also be checked
881 """
883 """
882 repo = self._repo
884 repo = self._repo
883 cl = repo.unfiltered().changelog
885 cl = repo.unfiltered().changelog
884 mfl = repo.manifestlog
886 mfl = repo.manifestlog
885 # fetch the linkrev
887 # fetch the linkrev
886 lkr = self.linkrev()
888 lkr = self.linkrev()
887 # hack to reuse ancestor computation when searching for renames
889 # hack to reuse ancestor computation when searching for renames
888 memberanc = getattr(self, '_ancestrycontext', None)
890 memberanc = getattr(self, '_ancestrycontext', None)
889 iteranc = None
891 iteranc = None
890 if srcrev is None:
892 if srcrev is None:
891 # wctx case, used by workingfilectx during mergecopy
893 # wctx case, used by workingfilectx during mergecopy
892 revs = [p.rev() for p in self._repo[None].parents()]
894 revs = [p.rev() for p in self._repo[None].parents()]
893 inclusive = True # we skipped the real (revless) source
895 inclusive = True # we skipped the real (revless) source
894 else:
896 else:
895 revs = [srcrev]
897 revs = [srcrev]
896 if memberanc is None:
898 if memberanc is None:
897 memberanc = iteranc = cl.ancestors(revs, lkr,
899 memberanc = iteranc = cl.ancestors(revs, lkr,
898 inclusive=inclusive)
900 inclusive=inclusive)
899 # check if this linkrev is an ancestor of srcrev
901 # check if this linkrev is an ancestor of srcrev
900 if lkr not in memberanc:
902 if lkr not in memberanc:
901 if iteranc is None:
903 if iteranc is None:
902 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
904 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
903 fnode = self._filenode
905 fnode = self._filenode
904 path = self._path
906 path = self._path
905 for a in iteranc:
907 for a in iteranc:
906 ac = cl.read(a) # get changeset data (we avoid object creation)
908 ac = cl.read(a) # get changeset data (we avoid object creation)
907 if path in ac[3]: # checking the 'files' field.
909 if path in ac[3]: # checking the 'files' field.
908 # The file has been touched, check if the content is
910 # The file has been touched, check if the content is
909 # similar to the one we search for.
911 # similar to the one we search for.
910 if fnode == mfl[ac[0]].readfast().get(path):
912 if fnode == mfl[ac[0]].readfast().get(path):
911 return a
913 return a
912 # In theory, we should never get out of that loop without a result.
914 # In theory, we should never get out of that loop without a result.
913 # But if manifest uses a buggy file revision (not children of the
915 # But if manifest uses a buggy file revision (not children of the
914 # one it replaces) we could. Such a buggy situation will likely
916 # one it replaces) we could. Such a buggy situation will likely
915 # result is crash somewhere else at to some point.
917 # result is crash somewhere else at to some point.
916 return lkr
918 return lkr
917
919
918 def introrev(self):
920 def introrev(self):
919 """return the rev of the changeset which introduced this file revision
921 """return the rev of the changeset which introduced this file revision
920
922
921 This method is different from linkrev because it take into account the
923 This method is different from linkrev because it take into account the
922 changeset the filectx was created from. It ensures the returned
924 changeset the filectx was created from. It ensures the returned
923 revision is one of its ancestors. This prevents bugs from
925 revision is one of its ancestors. This prevents bugs from
924 'linkrev-shadowing' when a file revision is used by multiple
926 'linkrev-shadowing' when a file revision is used by multiple
925 changesets.
927 changesets.
926 """
928 """
927 lkr = self.linkrev()
929 lkr = self.linkrev()
928 attrs = vars(self)
930 attrs = vars(self)
929 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
931 noctx = not ('_changeid' in attrs or '_changectx' in attrs)
930 if noctx or self.rev() == lkr:
932 if noctx or self.rev() == lkr:
931 return self.linkrev()
933 return self.linkrev()
932 return self._adjustlinkrev(self.rev(), inclusive=True)
934 return self._adjustlinkrev(self.rev(), inclusive=True)
933
935
934 def _parentfilectx(self, path, fileid, filelog):
936 def _parentfilectx(self, path, fileid, filelog):
935 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
937 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
936 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
938 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
937 if '_changeid' in vars(self) or '_changectx' in vars(self):
939 if '_changeid' in vars(self) or '_changectx' in vars(self):
938 # If self is associated with a changeset (probably explicitly
940 # If self is associated with a changeset (probably explicitly
939 # fed), ensure the created filectx is associated with a
941 # fed), ensure the created filectx is associated with a
940 # changeset that is an ancestor of self.changectx.
942 # changeset that is an ancestor of self.changectx.
941 # This lets us later use _adjustlinkrev to get a correct link.
943 # This lets us later use _adjustlinkrev to get a correct link.
942 fctx._descendantrev = self.rev()
944 fctx._descendantrev = self.rev()
943 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
945 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
944 elif '_descendantrev' in vars(self):
946 elif '_descendantrev' in vars(self):
945 # Otherwise propagate _descendantrev if we have one associated.
947 # Otherwise propagate _descendantrev if we have one associated.
946 fctx._descendantrev = self._descendantrev
948 fctx._descendantrev = self._descendantrev
947 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
949 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
948 return fctx
950 return fctx
949
951
950 def parents(self):
952 def parents(self):
951 _path = self._path
953 _path = self._path
952 fl = self._filelog
954 fl = self._filelog
953 parents = self._filelog.parents(self._filenode)
955 parents = self._filelog.parents(self._filenode)
954 pl = [(_path, node, fl) for node in parents if node != nullid]
956 pl = [(_path, node, fl) for node in parents if node != nullid]
955
957
956 r = fl.renamed(self._filenode)
958 r = fl.renamed(self._filenode)
957 if r:
959 if r:
958 # - In the simple rename case, both parent are nullid, pl is empty.
960 # - In the simple rename case, both parent are nullid, pl is empty.
959 # - In case of merge, only one of the parent is null id and should
961 # - In case of merge, only one of the parent is null id and should
960 # be replaced with the rename information. This parent is -always-
962 # be replaced with the rename information. This parent is -always-
961 # the first one.
963 # the first one.
962 #
964 #
963 # As null id have always been filtered out in the previous list
965 # As null id have always been filtered out in the previous list
964 # comprehension, inserting to 0 will always result in "replacing
966 # comprehension, inserting to 0 will always result in "replacing
965 # first nullid parent with rename information.
967 # first nullid parent with rename information.
966 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
968 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
967
969
968 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
970 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
969
971
970 def p1(self):
972 def p1(self):
971 return self.parents()[0]
973 return self.parents()[0]
972
974
973 def p2(self):
975 def p2(self):
974 p = self.parents()
976 p = self.parents()
975 if len(p) == 2:
977 if len(p) == 2:
976 return p[1]
978 return p[1]
977 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
979 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
978
980
979 def annotate(self, follow=False, linenumber=False, skiprevs=None,
981 def annotate(self, follow=False, linenumber=False, skiprevs=None,
980 diffopts=None):
982 diffopts=None):
981 '''returns a list of tuples of ((ctx, number), line) for each line
983 '''returns a list of tuples of ((ctx, number), line) for each line
982 in the file, where ctx is the filectx of the node where
984 in the file, where ctx is the filectx of the node where
983 that line was last changed; if linenumber parameter is true, number is
985 that line was last changed; if linenumber parameter is true, number is
984 the line number at the first appearance in the managed file, otherwise,
986 the line number at the first appearance in the managed file, otherwise,
985 number has a fixed value of False.
987 number has a fixed value of False.
986 '''
988 '''
987
989
988 def lines(text):
990 def lines(text):
989 if text.endswith("\n"):
991 if text.endswith("\n"):
990 return text.count("\n")
992 return text.count("\n")
991 return text.count("\n") + int(bool(text))
993 return text.count("\n") + int(bool(text))
992
994
993 if linenumber:
995 if linenumber:
994 def decorate(text, rev):
996 def decorate(text, rev):
995 return ([annotateline(fctx=rev, lineno=i)
997 return ([annotateline(fctx=rev, lineno=i)
996 for i in xrange(1, lines(text) + 1)], text)
998 for i in xrange(1, lines(text) + 1)], text)
997 else:
999 else:
998 def decorate(text, rev):
1000 def decorate(text, rev):
999 return ([annotateline(fctx=rev)] * lines(text), text)
1001 return ([annotateline(fctx=rev)] * lines(text), text)
1000
1002
1001 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1003 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1002
1004
1003 def parents(f):
1005 def parents(f):
1004 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1006 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1005 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1007 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1006 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1008 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1007 # isn't an ancestor of the srcrev.
1009 # isn't an ancestor of the srcrev.
1008 f._changeid
1010 f._changeid
1009 pl = f.parents()
1011 pl = f.parents()
1010
1012
1011 # Don't return renamed parents if we aren't following.
1013 # Don't return renamed parents if we aren't following.
1012 if not follow:
1014 if not follow:
1013 pl = [p for p in pl if p.path() == f.path()]
1015 pl = [p for p in pl if p.path() == f.path()]
1014
1016
1015 # renamed filectx won't have a filelog yet, so set it
1017 # renamed filectx won't have a filelog yet, so set it
1016 # from the cache to save time
1018 # from the cache to save time
1017 for p in pl:
1019 for p in pl:
1018 if not '_filelog' in p.__dict__:
1020 if not '_filelog' in p.__dict__:
1019 p._filelog = getlog(p.path())
1021 p._filelog = getlog(p.path())
1020
1022
1021 return pl
1023 return pl
1022
1024
1023 # use linkrev to find the first changeset where self appeared
1025 # use linkrev to find the first changeset where self appeared
1024 base = self
1026 base = self
1025 introrev = self.introrev()
1027 introrev = self.introrev()
1026 if self.rev() != introrev:
1028 if self.rev() != introrev:
1027 base = self.filectx(self.filenode(), changeid=introrev)
1029 base = self.filectx(self.filenode(), changeid=introrev)
1028 if getattr(base, '_ancestrycontext', None) is None:
1030 if getattr(base, '_ancestrycontext', None) is None:
1029 cl = self._repo.changelog
1031 cl = self._repo.changelog
1030 if introrev is None:
1032 if introrev is None:
1031 # wctx is not inclusive, but works because _ancestrycontext
1033 # wctx is not inclusive, but works because _ancestrycontext
1032 # is used to test filelog revisions
1034 # is used to test filelog revisions
1033 ac = cl.ancestors([p.rev() for p in base.parents()],
1035 ac = cl.ancestors([p.rev() for p in base.parents()],
1034 inclusive=True)
1036 inclusive=True)
1035 else:
1037 else:
1036 ac = cl.ancestors([introrev], inclusive=True)
1038 ac = cl.ancestors([introrev], inclusive=True)
1037 base._ancestrycontext = ac
1039 base._ancestrycontext = ac
1038
1040
1039 # This algorithm would prefer to be recursive, but Python is a
1041 # This algorithm would prefer to be recursive, but Python is a
1040 # bit recursion-hostile. Instead we do an iterative
1042 # bit recursion-hostile. Instead we do an iterative
1041 # depth-first search.
1043 # depth-first search.
1042
1044
1043 # 1st DFS pre-calculates pcache and needed
1045 # 1st DFS pre-calculates pcache and needed
1044 visit = [base]
1046 visit = [base]
1045 pcache = {}
1047 pcache = {}
1046 needed = {base: 1}
1048 needed = {base: 1}
1047 while visit:
1049 while visit:
1048 f = visit.pop()
1050 f = visit.pop()
1049 if f in pcache:
1051 if f in pcache:
1050 continue
1052 continue
1051 pl = parents(f)
1053 pl = parents(f)
1052 pcache[f] = pl
1054 pcache[f] = pl
1053 for p in pl:
1055 for p in pl:
1054 needed[p] = needed.get(p, 0) + 1
1056 needed[p] = needed.get(p, 0) + 1
1055 if p not in pcache:
1057 if p not in pcache:
1056 visit.append(p)
1058 visit.append(p)
1057
1059
1058 # 2nd DFS does the actual annotate
1060 # 2nd DFS does the actual annotate
1059 visit[:] = [base]
1061 visit[:] = [base]
1060 hist = {}
1062 hist = {}
1061 while visit:
1063 while visit:
1062 f = visit[-1]
1064 f = visit[-1]
1063 if f in hist:
1065 if f in hist:
1064 visit.pop()
1066 visit.pop()
1065 continue
1067 continue
1066
1068
1067 ready = True
1069 ready = True
1068 pl = pcache[f]
1070 pl = pcache[f]
1069 for p in pl:
1071 for p in pl:
1070 if p not in hist:
1072 if p not in hist:
1071 ready = False
1073 ready = False
1072 visit.append(p)
1074 visit.append(p)
1073 if ready:
1075 if ready:
1074 visit.pop()
1076 visit.pop()
1075 curr = decorate(f.data(), f)
1077 curr = decorate(f.data(), f)
1076 skipchild = False
1078 skipchild = False
1077 if skiprevs is not None:
1079 if skiprevs is not None:
1078 skipchild = f._changeid in skiprevs
1080 skipchild = f._changeid in skiprevs
1079 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1081 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1080 diffopts)
1082 diffopts)
1081 for p in pl:
1083 for p in pl:
1082 if needed[p] == 1:
1084 if needed[p] == 1:
1083 del hist[p]
1085 del hist[p]
1084 del needed[p]
1086 del needed[p]
1085 else:
1087 else:
1086 needed[p] -= 1
1088 needed[p] -= 1
1087
1089
1088 hist[f] = curr
1090 hist[f] = curr
1089 del pcache[f]
1091 del pcache[f]
1090
1092
1091 return zip(hist[base][0], hist[base][1].splitlines(True))
1093 return zip(hist[base][0], hist[base][1].splitlines(True))
1092
1094
    def ancestors(self, followfirst=False):
        """Yield ancestor filectxs of this filectx, newest first.

        Walks the file graph via ``parents()``; with ``followfirst`` only
        the first parent of each visited filectx is followed.  Pending
        candidates are kept in ``visit`` keyed by (linkrev, filenode) and
        the max key is popped each round, so results come out in
        descending linkrev order.
        """
        visit = {}
        c = self
        if followfirst:
            # slice below keeps only the first parent
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            # pop the candidate with the highest (linkrev, filenode) key
            c = visit.pop(max(visit))
            yield c
1108
1110
1109 def decodeddata(self):
1111 def decodeddata(self):
1110 """Returns `data()` after running repository decoding filters.
1112 """Returns `data()` after running repository decoding filters.
1111
1113
1112 This is often equivalent to how the data would be expressed on disk.
1114 This is often equivalent to how the data would be expressed on disk.
1113 """
1115 """
1114 return self._repo.wwritedata(self.path(), self.data())
1116 return self._repo.wwritedata(self.path(), self.data())
1115
1117
@attr.s(slots=True, frozen=True)
class annotateline(object):
    """Immutable record for one annotated line: which filectx introduced it."""
    # filectx that introduced this line
    fctx = attr.ib()
    # line number within fctx; default is False (not None) -- presumably a
    # sentinel meaning "not tracked" -- TODO confirm against callers
    lineno = attr.ib(default=False)
    # Whether this annotation was the result of a skip-annotate.
    skip = attr.ib(default=False)
1122
1124
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    See test-annotate.py for unit tests.
    '''
    # one (parent, diff-blocks) pair per parent; blocks come from mdiff
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        # This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        # diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    # parent hunk is at least as long: map child lines 1:1
                    for bk in xrange(b1, b2):
                        if child[0][bk].fctx == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = attr.evolve(parent[0][ak], skip=True)
                else:
                    # parent hunk shorter: defer to the repeat-last-line pass
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk].fctx == childfctx:
                        # a2 - 1 clamp repeats the parent's last line
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = attr.evolve(parent[0][ak], skip=True)
    return child
1183
1185
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # at least one of the three handles is required to locate the file
        # revision; the rest are lazily derived via propertycaches
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        # lazily build the owning changectx from self._changeid
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        """Return the raw (undecoded) revlog revision for this file node."""
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        """Return this file revision's content, honoring censor policy."""
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            # censored content is served as empty only when policy says so
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        """Return the size of this file revision as stored in the filelog."""
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        # if either changeset parent already has this exact file node, the
        # rename does not belong to this changeset
        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1289
1291
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # user/date/status fall back to propertycaches below when not given
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        # "<p1>+" marks an uncommitted context
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # default status when no explicit `changes` was given to __init__
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        ui = self._repo.ui
        # devel.default-date lets tests pin the commit date
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        # an uncommitted context has no subrepo revision yet
        return None

    def manifestnode(self):
        # not committed, so no manifest node exists yet
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # all files touched relative to the parent: modified+added+removed
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        return []

    def bookmarks(self):
        # union of the parents' bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if r'_manifest' in self.__dict__:
            # a manifest is already materialized (e.g. memctx) -- use it
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # parents first, then all changelog ancestors of the parents
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
                [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        return False
1490
1492
1491 class workingctx(committablectx):
1493 class workingctx(committablectx):
1492 """A workingctx object makes access to data related to
1494 """A workingctx object makes access to data related to
1493 the current working directory convenient.
1495 the current working directory convenient.
1494 date - any valid date string or (unixtime, offset), or None.
1496 date - any valid date string or (unixtime, offset), or None.
1495 user - username string, or None.
1497 user - username string, or None.
1496 extra - a dictionary of extra values, or None.
1498 extra - a dictionary of extra values, or None.
1497 changes - a list of file lists as returned by localrepo.status()
1499 changes - a list of file lists as returned by localrepo.status()
1498 or None to use the repository status.
1500 or None to use the repository status.
1499 """
1501 """
1500 def __init__(self, repo, text="", user=None, date=None, extra=None,
1502 def __init__(self, repo, text="", user=None, date=None, extra=None,
1501 changes=None):
1503 changes=None):
1502 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1504 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1503
1505
1504 def __iter__(self):
1506 def __iter__(self):
1505 d = self._repo.dirstate
1507 d = self._repo.dirstate
1506 for f in d:
1508 for f in d:
1507 if d[f] != 'r':
1509 if d[f] != 'r':
1508 yield f
1510 yield f
1509
1511
1510 def __contains__(self, key):
1512 def __contains__(self, key):
1511 return self._repo.dirstate[key] not in "?r"
1513 return self._repo.dirstate[key] not in "?r"
1512
1514
1513 def hex(self):
1515 def hex(self):
1514 return hex(wdirid)
1516 return hex(wdirid)
1515
1517
1516 @propertycache
1518 @propertycache
1517 def _parents(self):
1519 def _parents(self):
1518 p = self._repo.dirstate.parents()
1520 p = self._repo.dirstate.parents()
1519 if p[1] == nullid:
1521 if p[1] == nullid:
1520 p = p[:-1]
1522 p = p[:-1]
1521 return [changectx(self._repo, x) for x in p]
1523 return [changectx(self._repo, x) for x in p]
1522
1524
1523 def filectx(self, path, filelog=None):
1525 def filectx(self, path, filelog=None):
1524 """get a file context from the working directory"""
1526 """get a file context from the working directory"""
1525 return workingfilectx(self._repo, path, workingctx=self,
1527 return workingfilectx(self._repo, path, workingctx=self,
1526 filelog=filelog)
1528 filelog=filelog)
1527
1529
1528 def dirty(self, missing=False, merge=True, branch=True):
1530 def dirty(self, missing=False, merge=True, branch=True):
1529 "check whether a working directory is modified"
1531 "check whether a working directory is modified"
1530 # check subrepos first
1532 # check subrepos first
1531 for s in sorted(self.substate):
1533 for s in sorted(self.substate):
1532 if self.sub(s).dirty(missing=missing):
1534 if self.sub(s).dirty(missing=missing):
1533 return True
1535 return True
1534 # check current working dir
1536 # check current working dir
1535 return ((merge and self.p2()) or
1537 return ((merge and self.p2()) or
1536 (branch and self.branch() != self.p1().branch()) or
1538 (branch and self.branch() != self.p1().branch()) or
1537 self.modified() or self.added() or self.removed() or
1539 self.modified() or self.added() or self.removed() or
1538 (missing and self.deleted()))
1540 (missing and self.deleted()))
1539
1541
    def add(self, list, prefix=""):
        """Schedule the files in `list` for addition to the dirstate.

        Returns the sublist of files that were rejected (missing on disk,
        or not a regular file/symlink).  Warnings are printed for rejected
        and already-tracked files; nothing is committed here.
        """
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension. That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # warn, but still add: large files are expensive to manage
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    # already added/merged/normal -- nothing to do
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    # previously removed: resurrect instead of re-adding
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected
1574
1576
1575 def forget(self, files, prefix=""):
1577 def forget(self, files, prefix=""):
1576 with self._repo.wlock():
1578 with self._repo.wlock():
1577 ds = self._repo.dirstate
1579 ds = self._repo.dirstate
1578 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1580 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1579 rejected = []
1581 rejected = []
1580 for f in files:
1582 for f in files:
1581 if f not in self._repo.dirstate:
1583 if f not in self._repo.dirstate:
1582 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1584 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1583 rejected.append(f)
1585 rejected.append(f)
1584 elif self._repo.dirstate[f] != 'a':
1586 elif self._repo.dirstate[f] != 'a':
1585 self._repo.dirstate.remove(f)
1587 self._repo.dirstate.remove(f)
1586 else:
1588 else:
1587 self._repo.dirstate.drop(f)
1589 self._repo.dirstate.drop(f)
1588 return rejected
1590 return rejected
1589
1591
1590 def undelete(self, list):
1592 def undelete(self, list):
1591 pctxs = self.parents()
1593 pctxs = self.parents()
1592 with self._repo.wlock():
1594 with self._repo.wlock():
1593 ds = self._repo.dirstate
1595 ds = self._repo.dirstate
1594 for f in list:
1596 for f in list:
1595 if self._repo.dirstate[f] != 'r':
1597 if self._repo.dirstate[f] != 'r':
1596 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1598 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1597 else:
1599 else:
1598 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1600 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1599 t = fctx.data()
1601 t = fctx.data()
1600 self._repo.wwrite(f, t, fctx.flags())
1602 self._repo.wwrite(f, t, fctx.flags())
1601 self._repo.dirstate.normal(f)
1603 self._repo.dirstate.normal(f)
1602
1604
1603 def copy(self, source, dest):
1605 def copy(self, source, dest):
1604 try:
1606 try:
1605 st = self._repo.wvfs.lstat(dest)
1607 st = self._repo.wvfs.lstat(dest)
1606 except OSError as err:
1608 except OSError as err:
1607 if err.errno != errno.ENOENT:
1609 if err.errno != errno.ENOENT:
1608 raise
1610 raise
1609 self._repo.ui.warn(_("%s does not exist!\n")
1611 self._repo.ui.warn(_("%s does not exist!\n")
1610 % self._repo.dirstate.pathto(dest))
1612 % self._repo.dirstate.pathto(dest))
1611 return
1613 return
1612 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1614 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1613 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1615 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1614 "symbolic link\n")
1616 "symbolic link\n")
1615 % self._repo.dirstate.pathto(dest))
1617 % self._repo.dirstate.pathto(dest))
1616 else:
1618 else:
1617 with self._repo.wlock():
1619 with self._repo.wlock():
1618 if self._repo.dirstate[dest] in '?':
1620 if self._repo.dirstate[dest] in '?':
1619 self._repo.dirstate.add(dest)
1621 self._repo.dirstate.add(dest)
1620 elif self._repo.dirstate[dest] in 'r':
1622 elif self._repo.dirstate[dest] in 'r':
1621 self._repo.dirstate.normallookup(dest)
1623 self._repo.dirstate.normallookup(dest)
1622 self._repo.dirstate.copy(source, dest)
1624 self._repo.dirstate.copy(source, dest)
1623
1625
1624 def match(self, pats=None, include=None, exclude=None, default='glob',
1626 def match(self, pats=None, include=None, exclude=None, default='glob',
1625 listsubrepos=False, badfn=None):
1627 listsubrepos=False, badfn=None):
1626 r = self._repo
1628 r = self._repo
1627
1629
1628 # Only a case insensitive filesystem needs magic to translate user input
1630 # Only a case insensitive filesystem needs magic to translate user input
1629 # to actual case in the filesystem.
1631 # to actual case in the filesystem.
1630 icasefs = not util.fscasesensitive(r.root)
1632 icasefs = not util.fscasesensitive(r.root)
1631 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1633 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1632 default, auditor=r.auditor, ctx=self,
1634 default, auditor=r.auditor, ctx=self,
1633 listsubrepos=listsubrepos, badfn=badfn,
1635 listsubrepos=listsubrepos, badfn=badfn,
1634 icasefs=icasefs)
1636 icasefs=icasefs)
1635
1637
1636 def flushall(self):
1638 def flushall(self):
1637 pass # For overlayworkingfilectx compatibility.
1639 pass # For overlayworkingfilectx compatibility.
1638
1640
1639 def _filtersuspectsymlink(self, files):
1641 def _filtersuspectsymlink(self, files):
1640 if not files or self._repo.dirstate._checklink:
1642 if not files or self._repo.dirstate._checklink:
1641 return files
1643 return files
1642
1644
1643 # Symlink placeholders may get non-symlink-like contents
1645 # Symlink placeholders may get non-symlink-like contents
1644 # via user error or dereferencing by NFS or Samba servers,
1646 # via user error or dereferencing by NFS or Samba servers,
1645 # so we filter out any placeholders that don't look like a
1647 # so we filter out any placeholders that don't look like a
1646 # symlink
1648 # symlink
1647 sane = []
1649 sane = []
1648 for f in files:
1650 for f in files:
1649 if self.flags(f) == 'l':
1651 if self.flags(f) == 'l':
1650 d = self[f].data()
1652 d = self[f].data()
1651 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1653 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1652 self._repo.ui.debug('ignoring suspect symlink placeholder'
1654 self._repo.ui.debug('ignoring suspect symlink placeholder'
1653 ' "%s"\n' % f)
1655 ' "%s"\n' % f)
1654 continue
1656 continue
1655 sane.append(f)
1657 sane.append(f)
1656 return sane
1658 return sane
1657
1659
1658 def _checklookup(self, files):
1660 def _checklookup(self, files):
1659 # check for any possibly clean files
1661 # check for any possibly clean files
1660 if not files:
1662 if not files:
1661 return [], [], []
1663 return [], [], []
1662
1664
1663 modified = []
1665 modified = []
1664 deleted = []
1666 deleted = []
1665 fixup = []
1667 fixup = []
1666 pctx = self._parents[0]
1668 pctx = self._parents[0]
1667 # do a full compare of any files that might have changed
1669 # do a full compare of any files that might have changed
1668 for f in sorted(files):
1670 for f in sorted(files):
1669 try:
1671 try:
1670 # This will return True for a file that got replaced by a
1672 # This will return True for a file that got replaced by a
1671 # directory in the interim, but fixing that is pretty hard.
1673 # directory in the interim, but fixing that is pretty hard.
1672 if (f not in pctx or self.flags(f) != pctx.flags(f)
1674 if (f not in pctx or self.flags(f) != pctx.flags(f)
1673 or pctx[f].cmp(self[f])):
1675 or pctx[f].cmp(self[f])):
1674 modified.append(f)
1676 modified.append(f)
1675 else:
1677 else:
1676 fixup.append(f)
1678 fixup.append(f)
1677 except (IOError, OSError):
1679 except (IOError, OSError):
1678 # A file become inaccessible in between? Mark it as deleted,
1680 # A file become inaccessible in between? Mark it as deleted,
1679 # matching dirstate behavior (issue5584).
1681 # matching dirstate behavior (issue5584).
1680 # The dirstate has more complex behavior around whether a
1682 # The dirstate has more complex behavior around whether a
1681 # missing file matches a directory, etc, but we don't need to
1683 # missing file matches a directory, etc, but we don't need to
1682 # bother with that: if f has made it to this point, we're sure
1684 # bother with that: if f has made it to this point, we're sure
1683 # it's in the dirstate.
1685 # it's in the dirstate.
1684 deleted.append(f)
1686 deleted.append(f)
1685
1687
1686 return modified, deleted, fixup
1688 return modified, deleted, fixup
1687
1689
1688 def _poststatusfixup(self, status, fixup):
1690 def _poststatusfixup(self, status, fixup):
1689 """update dirstate for files that are actually clean"""
1691 """update dirstate for files that are actually clean"""
1690 poststatus = self._repo.postdsstatus()
1692 poststatus = self._repo.postdsstatus()
1691 if fixup or poststatus:
1693 if fixup or poststatus:
1692 try:
1694 try:
1693 oldid = self._repo.dirstate.identity()
1695 oldid = self._repo.dirstate.identity()
1694
1696
1695 # updating the dirstate is optional
1697 # updating the dirstate is optional
1696 # so we don't wait on the lock
1698 # so we don't wait on the lock
1697 # wlock can invalidate the dirstate, so cache normal _after_
1699 # wlock can invalidate the dirstate, so cache normal _after_
1698 # taking the lock
1700 # taking the lock
1699 with self._repo.wlock(False):
1701 with self._repo.wlock(False):
1700 if self._repo.dirstate.identity() == oldid:
1702 if self._repo.dirstate.identity() == oldid:
1701 if fixup:
1703 if fixup:
1702 normal = self._repo.dirstate.normal
1704 normal = self._repo.dirstate.normal
1703 for f in fixup:
1705 for f in fixup:
1704 normal(f)
1706 normal(f)
1705 # write changes out explicitly, because nesting
1707 # write changes out explicitly, because nesting
1706 # wlock at runtime may prevent 'wlock.release()'
1708 # wlock at runtime may prevent 'wlock.release()'
1707 # after this block from doing so for subsequent
1709 # after this block from doing so for subsequent
1708 # changing files
1710 # changing files
1709 tr = self._repo.currenttransaction()
1711 tr = self._repo.currenttransaction()
1710 self._repo.dirstate.write(tr)
1712 self._repo.dirstate.write(tr)
1711
1713
1712 if poststatus:
1714 if poststatus:
1713 for ps in poststatus:
1715 for ps in poststatus:
1714 ps(self, status)
1716 ps(self, status)
1715 else:
1717 else:
1716 # in this case, writing changes out breaks
1718 # in this case, writing changes out breaks
1717 # consistency, because .hg/dirstate was
1719 # consistency, because .hg/dirstate was
1718 # already changed simultaneously after last
1720 # already changed simultaneously after last
1719 # caching (see also issue5584 for detail)
1721 # caching (see also issue5584 for detail)
1720 self._repo.ui.debug('skip updating dirstate: '
1722 self._repo.ui.debug('skip updating dirstate: '
1721 'identity mismatch\n')
1723 'identity mismatch\n')
1722 except error.LockError:
1724 except error.LockError:
1723 pass
1725 pass
1724 finally:
1726 finally:
1725 # Even if the wlock couldn't be grabbed, clear out the list.
1727 # Even if the wlock couldn't be grabbed, clear out the list.
1726 self._repo.clearpostdsstatus()
1728 self._repo.clearpostdsstatus()
1727
1729
1728 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1730 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1729 '''Gets the status from the dirstate -- internal use only.'''
1731 '''Gets the status from the dirstate -- internal use only.'''
1730 subrepos = []
1732 subrepos = []
1731 if '.hgsub' in self:
1733 if '.hgsub' in self:
1732 subrepos = sorted(self.substate)
1734 subrepos = sorted(self.substate)
1733 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1735 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1734 clean=clean, unknown=unknown)
1736 clean=clean, unknown=unknown)
1735
1737
1736 # check for any possibly clean files
1738 # check for any possibly clean files
1737 fixup = []
1739 fixup = []
1738 if cmp:
1740 if cmp:
1739 modified2, deleted2, fixup = self._checklookup(cmp)
1741 modified2, deleted2, fixup = self._checklookup(cmp)
1740 s.modified.extend(modified2)
1742 s.modified.extend(modified2)
1741 s.deleted.extend(deleted2)
1743 s.deleted.extend(deleted2)
1742
1744
1743 if fixup and clean:
1745 if fixup and clean:
1744 s.clean.extend(fixup)
1746 s.clean.extend(fixup)
1745
1747
1746 self._poststatusfixup(s, fixup)
1748 self._poststatusfixup(s, fixup)
1747
1749
1748 if match.always():
1750 if match.always():
1749 # cache for performance
1751 # cache for performance
1750 if s.unknown or s.ignored or s.clean:
1752 if s.unknown or s.ignored or s.clean:
1751 # "_status" is cached with list*=False in the normal route
1753 # "_status" is cached with list*=False in the normal route
1752 self._status = scmutil.status(s.modified, s.added, s.removed,
1754 self._status = scmutil.status(s.modified, s.added, s.removed,
1753 s.deleted, [], [], [])
1755 s.deleted, [], [], [])
1754 else:
1756 else:
1755 self._status = s
1757 self._status = s
1756
1758
1757 return s
1759 return s
1758
1760
1759 @propertycache
1761 @propertycache
1760 def _manifest(self):
1762 def _manifest(self):
1761 """generate a manifest corresponding to the values in self._status
1763 """generate a manifest corresponding to the values in self._status
1762
1764
1763 This reuse the file nodeid from parent, but we use special node
1765 This reuse the file nodeid from parent, but we use special node
1764 identifiers for added and modified files. This is used by manifests
1766 identifiers for added and modified files. This is used by manifests
1765 merge to see that files are different and by update logic to avoid
1767 merge to see that files are different and by update logic to avoid
1766 deleting newly added files.
1768 deleting newly added files.
1767 """
1769 """
1768 return self._buildstatusmanifest(self._status)
1770 return self._buildstatusmanifest(self._status)
1769
1771
1770 def _buildstatusmanifest(self, status):
1772 def _buildstatusmanifest(self, status):
1771 """Builds a manifest that includes the given status results."""
1773 """Builds a manifest that includes the given status results."""
1772 parents = self.parents()
1774 parents = self.parents()
1773
1775
1774 man = parents[0].manifest().copy()
1776 man = parents[0].manifest().copy()
1775
1777
1776 ff = self._flagfunc
1778 ff = self._flagfunc
1777 for i, l in ((addednodeid, status.added),
1779 for i, l in ((addednodeid, status.added),
1778 (modifiednodeid, status.modified)):
1780 (modifiednodeid, status.modified)):
1779 for f in l:
1781 for f in l:
1780 man[f] = i
1782 man[f] = i
1781 try:
1783 try:
1782 man.setflag(f, ff(f))
1784 man.setflag(f, ff(f))
1783 except OSError:
1785 except OSError:
1784 pass
1786 pass
1785
1787
1786 for f in status.deleted + status.removed:
1788 for f in status.deleted + status.removed:
1787 if f in man:
1789 if f in man:
1788 del man[f]
1790 del man[f]
1789
1791
1790 return man
1792 return man
1791
1793
1792 def _buildstatus(self, other, s, match, listignored, listclean,
1794 def _buildstatus(self, other, s, match, listignored, listclean,
1793 listunknown):
1795 listunknown):
1794 """build a status with respect to another context
1796 """build a status with respect to another context
1795
1797
1796 This includes logic for maintaining the fast path of status when
1798 This includes logic for maintaining the fast path of status when
1797 comparing the working directory against its parent, which is to skip
1799 comparing the working directory against its parent, which is to skip
1798 building a new manifest if self (working directory) is not comparing
1800 building a new manifest if self (working directory) is not comparing
1799 against its parent (repo['.']).
1801 against its parent (repo['.']).
1800 """
1802 """
1801 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1803 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1802 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1804 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1803 # might have accidentally ended up with the entire contents of the file
1805 # might have accidentally ended up with the entire contents of the file
1804 # they are supposed to be linking to.
1806 # they are supposed to be linking to.
1805 s.modified[:] = self._filtersuspectsymlink(s.modified)
1807 s.modified[:] = self._filtersuspectsymlink(s.modified)
1806 if other != self._repo['.']:
1808 if other != self._repo['.']:
1807 s = super(workingctx, self)._buildstatus(other, s, match,
1809 s = super(workingctx, self)._buildstatus(other, s, match,
1808 listignored, listclean,
1810 listignored, listclean,
1809 listunknown)
1811 listunknown)
1810 return s
1812 return s
1811
1813
1812 def _matchstatus(self, other, match):
1814 def _matchstatus(self, other, match):
1813 """override the match method with a filter for directory patterns
1815 """override the match method with a filter for directory patterns
1814
1816
1815 We use inheritance to customize the match.bad method only in cases of
1817 We use inheritance to customize the match.bad method only in cases of
1816 workingctx since it belongs only to the working directory when
1818 workingctx since it belongs only to the working directory when
1817 comparing against the parent changeset.
1819 comparing against the parent changeset.
1818
1820
1819 If we aren't comparing against the working directory's parent, then we
1821 If we aren't comparing against the working directory's parent, then we
1820 just use the default match object sent to us.
1822 just use the default match object sent to us.
1821 """
1823 """
1822 if other != self._repo['.']:
1824 if other != self._repo['.']:
1823 def bad(f, msg):
1825 def bad(f, msg):
1824 # 'f' may be a directory pattern from 'match.files()',
1826 # 'f' may be a directory pattern from 'match.files()',
1825 # so 'f not in ctx1' is not enough
1827 # so 'f not in ctx1' is not enough
1826 if f not in other and not other.hasdir(f):
1828 if f not in other and not other.hasdir(f):
1827 self._repo.ui.warn('%s: %s\n' %
1829 self._repo.ui.warn('%s: %s\n' %
1828 (self._repo.dirstate.pathto(f), msg))
1830 (self._repo.dirstate.pathto(f), msg))
1829 match.bad = bad
1831 match.bad = bad
1830 return match
1832 return match
1831
1833
1832 def markcommitted(self, node):
1834 def markcommitted(self, node):
1833 super(workingctx, self).markcommitted(node)
1835 super(workingctx, self).markcommitted(node)
1834
1836
1835 sparse.aftercommit(self._repo, node)
1837 sparse.aftercommit(self._repo, node)
1836
1838
1837 class committablefilectx(basefilectx):
1839 class committablefilectx(basefilectx):
1838 """A committablefilectx provides common functionality for a file context
1840 """A committablefilectx provides common functionality for a file context
1839 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1841 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1840 def __init__(self, repo, path, filelog=None, ctx=None):
1842 def __init__(self, repo, path, filelog=None, ctx=None):
1841 self._repo = repo
1843 self._repo = repo
1842 self._path = path
1844 self._path = path
1843 self._changeid = None
1845 self._changeid = None
1844 self._filerev = self._filenode = None
1846 self._filerev = self._filenode = None
1845
1847
1846 if filelog is not None:
1848 if filelog is not None:
1847 self._filelog = filelog
1849 self._filelog = filelog
1848 if ctx:
1850 if ctx:
1849 self._changectx = ctx
1851 self._changectx = ctx
1850
1852
1851 def __nonzero__(self):
1853 def __nonzero__(self):
1852 return True
1854 return True
1853
1855
1854 __bool__ = __nonzero__
1856 __bool__ = __nonzero__
1855
1857
1856 def linkrev(self):
1858 def linkrev(self):
1857 # linked to self._changectx no matter if file is modified or not
1859 # linked to self._changectx no matter if file is modified or not
1858 return self.rev()
1860 return self.rev()
1859
1861
1860 def parents(self):
1862 def parents(self):
1861 '''return parent filectxs, following copies if necessary'''
1863 '''return parent filectxs, following copies if necessary'''
1862 def filenode(ctx, path):
1864 def filenode(ctx, path):
1863 return ctx._manifest.get(path, nullid)
1865 return ctx._manifest.get(path, nullid)
1864
1866
1865 path = self._path
1867 path = self._path
1866 fl = self._filelog
1868 fl = self._filelog
1867 pcl = self._changectx._parents
1869 pcl = self._changectx._parents
1868 renamed = self.renamed()
1870 renamed = self.renamed()
1869
1871
1870 if renamed:
1872 if renamed:
1871 pl = [renamed + (None,)]
1873 pl = [renamed + (None,)]
1872 else:
1874 else:
1873 pl = [(path, filenode(pcl[0], path), fl)]
1875 pl = [(path, filenode(pcl[0], path), fl)]
1874
1876
1875 for pc in pcl[1:]:
1877 for pc in pcl[1:]:
1876 pl.append((path, filenode(pc, path), fl))
1878 pl.append((path, filenode(pc, path), fl))
1877
1879
1878 return [self._parentfilectx(p, fileid=n, filelog=l)
1880 return [self._parentfilectx(p, fileid=n, filelog=l)
1879 for p, n, l in pl if n != nullid]
1881 for p, n, l in pl if n != nullid]
1880
1882
1881 def children(self):
1883 def children(self):
1882 return []
1884 return []
1883
1885
1884 class workingfilectx(committablefilectx):
1886 class workingfilectx(committablefilectx):
1885 """A workingfilectx object makes access to data related to a particular
1887 """A workingfilectx object makes access to data related to a particular
1886 file in the working directory convenient."""
1888 file in the working directory convenient."""
1887 def __init__(self, repo, path, filelog=None, workingctx=None):
1889 def __init__(self, repo, path, filelog=None, workingctx=None):
1888 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1890 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1889
1891
1890 @propertycache
1892 @propertycache
1891 def _changectx(self):
1893 def _changectx(self):
1892 return workingctx(self._repo)
1894 return workingctx(self._repo)
1893
1895
1894 def data(self):
1896 def data(self):
1895 return self._repo.wread(self._path)
1897 return self._repo.wread(self._path)
1896 def renamed(self):
1898 def renamed(self):
1897 rp = self._repo.dirstate.copied(self._path)
1899 rp = self._repo.dirstate.copied(self._path)
1898 if not rp:
1900 if not rp:
1899 return None
1901 return None
1900 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1902 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1901
1903
1902 def size(self):
1904 def size(self):
1903 return self._repo.wvfs.lstat(self._path).st_size
1905 return self._repo.wvfs.lstat(self._path).st_size
1904 def date(self):
1906 def date(self):
1905 t, tz = self._changectx.date()
1907 t, tz = self._changectx.date()
1906 try:
1908 try:
1907 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1909 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1908 except OSError as err:
1910 except OSError as err:
1909 if err.errno != errno.ENOENT:
1911 if err.errno != errno.ENOENT:
1910 raise
1912 raise
1911 return (t, tz)
1913 return (t, tz)
1912
1914
1913 def exists(self):
1915 def exists(self):
1914 return self._repo.wvfs.exists(self._path)
1916 return self._repo.wvfs.exists(self._path)
1915
1917
1916 def lexists(self):
1918 def lexists(self):
1917 return self._repo.wvfs.lexists(self._path)
1919 return self._repo.wvfs.lexists(self._path)
1918
1920
1919 def audit(self):
1921 def audit(self):
1920 return self._repo.wvfs.audit(self._path)
1922 return self._repo.wvfs.audit(self._path)
1921
1923
1922 def cmp(self, fctx):
1924 def cmp(self, fctx):
1923 """compare with other file context
1925 """compare with other file context
1924
1926
1925 returns True if different than fctx.
1927 returns True if different than fctx.
1926 """
1928 """
1927 # fctx should be a filectx (not a workingfilectx)
1929 # fctx should be a filectx (not a workingfilectx)
1928 # invert comparison to reuse the same code path
1930 # invert comparison to reuse the same code path
1929 return fctx.cmp(self)
1931 return fctx.cmp(self)
1930
1932
1931 def remove(self, ignoremissing=False):
1933 def remove(self, ignoremissing=False):
1932 """wraps unlink for a repo's working directory"""
1934 """wraps unlink for a repo's working directory"""
1933 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1935 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1934
1936
1935 def write(self, data, flags, backgroundclose=False):
1937 def write(self, data, flags, backgroundclose=False):
1936 """wraps repo.wwrite"""
1938 """wraps repo.wwrite"""
1937 self._repo.wwrite(self._path, data, flags,
1939 self._repo.wwrite(self._path, data, flags,
1938 backgroundclose=backgroundclose)
1940 backgroundclose=backgroundclose)
1939
1941
1940 def markcopied(self, src):
1942 def markcopied(self, src):
1941 """marks this file a copy of `src`"""
1943 """marks this file a copy of `src`"""
1942 if self._repo.dirstate[self._path] in "nma":
1944 if self._repo.dirstate[self._path] in "nma":
1943 self._repo.dirstate.copy(src, self._path)
1945 self._repo.dirstate.copy(src, self._path)
1944
1946
1945 def clearunknown(self):
1947 def clearunknown(self):
1946 """Removes conflicting items in the working directory so that
1948 """Removes conflicting items in the working directory so that
1947 ``write()`` can be called successfully.
1949 ``write()`` can be called successfully.
1948 """
1950 """
1949 wvfs = self._repo.wvfs
1951 wvfs = self._repo.wvfs
1950 f = self._path
1952 f = self._path
1951 wvfs.audit(f)
1953 wvfs.audit(f)
1952 if wvfs.isdir(f) and not wvfs.islink(f):
1954 if wvfs.isdir(f) and not wvfs.islink(f):
1953 wvfs.rmtree(f, forcibly=True)
1955 wvfs.rmtree(f, forcibly=True)
1954 for p in reversed(list(util.finddirs(f))):
1956 for p in reversed(list(util.finddirs(f))):
1955 if wvfs.isfileorlink(p):
1957 if wvfs.isfileorlink(p):
1956 wvfs.unlink(p)
1958 wvfs.unlink(p)
1957 break
1959 break
1958
1960
1959 def setflags(self, l, x):
1961 def setflags(self, l, x):
1960 self._repo.wvfs.setflags(self._path, l, x)
1962 self._repo.wvfs.setflags(self._path, l, x)
1961
1963
1962 class overlayworkingctx(workingctx):
1964 class overlayworkingctx(workingctx):
1963 """Wraps another mutable context with a write-back cache that can be flushed
1965 """Wraps another mutable context with a write-back cache that can be flushed
1964 at a later time.
1966 at a later time.
1965
1967
1966 self._cache[path] maps to a dict with keys: {
1968 self._cache[path] maps to a dict with keys: {
1967 'exists': bool?
1969 'exists': bool?
1968 'date': date?
1970 'date': date?
1969 'data': str?
1971 'data': str?
1970 'flags': str?
1972 'flags': str?
1971 }
1973 }
1972 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1974 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1973 is `False`, the file was deleted.
1975 is `False`, the file was deleted.
1974 """
1976 """
1975
1977
1976 def __init__(self, repo, wrappedctx):
1978 def __init__(self, repo, wrappedctx):
1977 super(overlayworkingctx, self).__init__(repo)
1979 super(overlayworkingctx, self).__init__(repo)
1978 self._repo = repo
1980 self._repo = repo
1979 self._wrappedctx = wrappedctx
1981 self._wrappedctx = wrappedctx
1980 self._clean()
1982 self._clean()
1981
1983
1982 def data(self, path):
1984 def data(self, path):
1983 if self.isdirty(path):
1985 if self.isdirty(path):
1984 if self._cache[path]['exists']:
1986 if self._cache[path]['exists']:
1985 if self._cache[path]['data']:
1987 if self._cache[path]['data']:
1986 return self._cache[path]['data']
1988 return self._cache[path]['data']
1987 else:
1989 else:
1988 # Must fallback here, too, because we only set flags.
1990 # Must fallback here, too, because we only set flags.
1989 return self._wrappedctx[path].data()
1991 return self._wrappedctx[path].data()
1990 else:
1992 else:
1991 raise error.ProgrammingError("No such file or directory: %s" %
1993 raise error.ProgrammingError("No such file or directory: %s" %
1992 self._path)
1994 self._path)
1993 else:
1995 else:
1994 return self._wrappedctx[path].data()
1996 return self._wrappedctx[path].data()
1995
1997
1996 def isinmemory(self):
1998 def isinmemory(self):
1997 return True
1999 return True
1998
2000
1999 def filedate(self, path):
2001 def filedate(self, path):
2000 if self.isdirty(path):
2002 if self.isdirty(path):
2001 return self._cache[path]['date']
2003 return self._cache[path]['date']
2002 else:
2004 else:
2003 return self._wrappedctx[path].date()
2005 return self._wrappedctx[path].date()
2004
2006
2005 def flags(self, path):
2007 def flags(self, path):
2006 if self.isdirty(path):
2008 if self.isdirty(path):
2007 if self._cache[path]['exists']:
2009 if self._cache[path]['exists']:
2008 return self._cache[path]['flags']
2010 return self._cache[path]['flags']
2009 else:
2011 else:
2010 raise error.ProgrammingError("No such file or directory: %s" %
2012 raise error.ProgrammingError("No such file or directory: %s" %
2011 self._path)
2013 self._path)
2012 else:
2014 else:
2013 return self._wrappedctx[path].flags()
2015 return self._wrappedctx[path].flags()
2014
2016
2015 def write(self, path, data, flags=''):
2017 def write(self, path, data, flags=''):
2016 if data is None:
2018 if data is None:
2017 raise error.ProgrammingError("data must be non-None")
2019 raise error.ProgrammingError("data must be non-None")
2018 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2020 self._markdirty(path, exists=True, data=data, date=util.makedate(),
2019 flags=flags)
2021 flags=flags)
2020
2022
2021 def setflags(self, path, l, x):
2023 def setflags(self, path, l, x):
2022 self._markdirty(path, exists=True, date=util.makedate(),
2024 self._markdirty(path, exists=True, date=util.makedate(),
2023 flags=(l and 'l' or '') + (x and 'x' or ''))
2025 flags=(l and 'l' or '') + (x and 'x' or ''))
2024
2026
2025 def remove(self, path):
2027 def remove(self, path):
2026 self._markdirty(path, exists=False)
2028 self._markdirty(path, exists=False)
2027
2029
2028 def exists(self, path):
2030 def exists(self, path):
2029 """exists behaves like `lexists`, but needs to follow symlinks and
2031 """exists behaves like `lexists`, but needs to follow symlinks and
2030 return False if they are broken.
2032 return False if they are broken.
2031 """
2033 """
2032 if self.isdirty(path):
2034 if self.isdirty(path):
2033 # If this path exists and is a symlink, "follow" it by calling
2035 # If this path exists and is a symlink, "follow" it by calling
2034 # exists on the destination path.
2036 # exists on the destination path.
2035 if (self._cache[path]['exists'] and
2037 if (self._cache[path]['exists'] and
2036 'l' in self._cache[path]['flags']):
2038 'l' in self._cache[path]['flags']):
2037 return self.exists(self._cache[path]['data'].strip())
2039 return self.exists(self._cache[path]['data'].strip())
2038 else:
2040 else:
2039 return self._cache[path]['exists']
2041 return self._cache[path]['exists']
2040 return self._wrappedctx[path].exists()
2042 return self._wrappedctx[path].exists()
2041
2043
2042 def lexists(self, path):
2044 def lexists(self, path):
2043 """lexists returns True if the path exists"""
2045 """lexists returns True if the path exists"""
2044 if self.isdirty(path):
2046 if self.isdirty(path):
2045 return self._cache[path]['exists']
2047 return self._cache[path]['exists']
2046 return self._wrappedctx[path].lexists()
2048 return self._wrappedctx[path].lexists()
2047
2049
2048 def size(self, path):
2050 def size(self, path):
2049 if self.isdirty(path):
2051 if self.isdirty(path):
2050 if self._cache[path]['exists']:
2052 if self._cache[path]['exists']:
2051 return len(self._cache[path]['data'])
2053 return len(self._cache[path]['data'])
2052 else:
2054 else:
2053 raise error.ProgrammingError("No such file or directory: %s" %
2055 raise error.ProgrammingError("No such file or directory: %s" %
2054 self._path)
2056 self._path)
2055 return self._wrappedctx[path].size()
2057 return self._wrappedctx[path].size()
2056
2058
2057 def flushall(self):
2059 def flushall(self):
2058 for path in self._writeorder:
2060 for path in self._writeorder:
2059 entry = self._cache[path]
2061 entry = self._cache[path]
2060 if entry['exists']:
2062 if entry['exists']:
2061 self._wrappedctx[path].clearunknown()
2063 self._wrappedctx[path].clearunknown()
2062 if entry['data'] is not None:
2064 if entry['data'] is not None:
2063 if entry['flags'] is None:
2065 if entry['flags'] is None:
2064 raise error.ProgrammingError('data set but not flags')
2066 raise error.ProgrammingError('data set but not flags')
2065 self._wrappedctx[path].write(
2067 self._wrappedctx[path].write(
2066 entry['data'],
2068 entry['data'],
2067 entry['flags'])
2069 entry['flags'])
2068 else:
2070 else:
2069 self._wrappedctx[path].setflags(
2071 self._wrappedctx[path].setflags(
2070 'l' in entry['flags'],
2072 'l' in entry['flags'],
2071 'x' in entry['flags'])
2073 'x' in entry['flags'])
2072 else:
2074 else:
2073 self._wrappedctx[path].remove(path)
2075 self._wrappedctx[path].remove(path)
2074 self._clean()
2076 self._clean()
2075
2077
2076 def isdirty(self, path):
2078 def isdirty(self, path):
2077 return path in self._cache
2079 return path in self._cache
2078
2080
2079 def _clean(self):
2081 def _clean(self):
2080 self._cache = {}
2082 self._cache = {}
2081 self._writeorder = []
2083 self._writeorder = []
2082
2084
2083 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2085 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2084 if path not in self._cache:
2086 if path not in self._cache:
2085 self._writeorder.append(path)
2087 self._writeorder.append(path)
2086
2088
2087 self._cache[path] = {
2089 self._cache[path] = {
2088 'exists': exists,
2090 'exists': exists,
2089 'data': data,
2091 'data': data,
2090 'date': date,
2092 'date': date,
2091 'flags': flags,
2093 'flags': flags,
2092 }
2094 }
2093
2095
2094 def filectx(self, path, filelog=None):
2096 def filectx(self, path, filelog=None):
2095 return overlayworkingfilectx(self._repo, path, parent=self,
2097 return overlayworkingfilectx(self._repo, path, parent=self,
2096 filelog=filelog)
2098 filelog=filelog)
2097
2099
class overlayworkingfilectx(workingfilectx):
    """A ``workingfilectx`` variant whose writes never touch the disk.

    Every read and write is delegated to the parent (overlay) context,
    which records changes in an in-memory cache that can be flushed
    later by calling ``flush()``.
    """

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        # compare by content only
        return self.data() != fctx.data()

    def ctx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def renamed(self):
        # Copies are still tracked in the dirstate; this is a straight
        # copy of workingfilectx's behavior.
        source = self._repo.dirstate.copied(self._path)
        if not source:
            return None
        return source, self._changectx._parents[0]._manifest.get(source,
                                                                 nullid)

    def size(self):
        return self._parent.size(self._path)

    def audit(self):
        # nothing to audit for purely in-memory files
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False):
        return self._parent.write(self._path, data, flags)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)
2152
2154
class workingcommitctx(workingctx):
    """Convenient access to the data of the revision being committed.

    Working-directory changes that are not part of this commit are
    hidden by this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # deliberately skips workingctx.__init__ and calls its parent
        # directly, passing the precomputed ``changes`` status
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return files matched by ``match`` restricted to ``self._status``.

        Files not committed in this context appear "clean" here even if
        they are actually modified in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        modified = [f for f in self._status.modified if match(f)]
        added = [f for f in self._status.added if match(f)]
        removed = [f for f in self._status.removed if match(f)]
        return scmutil.status(modified, added, removed, [], [], [], clean)

    @propertycache
    def _changedset(self):
        """The set of all files changed in this context."""
        return (set(self._status.modified)
                | set(self._status.added)
                | set(self._status.removed))
2188
2190
def makecachingfilectxfn(func):
    """Wrap ``func`` so its results are memoized per path.

    util.cachefunc cannot be used here because it keys on all arguments,
    which would create a reference cycle through repo and memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        if path in cache:
            return cache[path]
        fctx = func(repo, memctx, path)
        cache[path] = fctx
        return fctx

    return getfilectx
2204
2206
def memfilefromctx(ctx):
    """Return a filectxfn serving ``ctx[path]`` as memfilectx objects.

    Convenience helper for building a memctx on top of another context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        copied = fctx.renamed()
        # renamed() returns a (path, filenode) tuple but only the source
        # path is kept here (why not store just that instead of a tuple?)
        if copied:
            copied = copied[0]
        return memfilectx(repo, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copied, memctx=memctx)

    return getfilectx
2223
2225
def memfilefrompatch(patchstore):
    """Return a filectxfn backed by a patch (e.g. a patchstore object).

    Convenience helper for building a memctx from a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            # the patch removed this file
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data, islink=islink,
                          isexec=isexec, copied=copied,
                          memctx=memctx)

    return getfilectx
2239
2241
class memctx(committablectx):
    """In-memory commits, for use with localrepo.commitctx().

    Revision metadata is supplied at construction time while file data
    is provided lazily through a callback.  'repo' is the current
    localrepo, 'parents' is a sequence of two parent identifiers (pass
    None for a missing parent), 'text' is the commit message and
    'files' lists the names of files touched by the revision
    (normalized, relative to the repository root).

    filectxfn(repo, memctx, path) is called by the commit machinery for
    every file in 'files' (call order is undefined).  It must return a
    memfilectx for a file present (updated or added) in the revision,
    or None for a removed file (recent Mercurial).  A move is expressed
    by removing the source file and adding the new one with copy
    information (see memfilectx).

    user defaults to the current repository username, date (any format
    accepted by util.parsedate()) to the current date, and extra to an
    empty metadata dictionary.
    """

    # Mercurial <= 3.1 expected filectxfn to raise IOError for missing
    # files. Extensions that must stay compatible with those versions
    # can inspect this flag to decide what filectxfn should do.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        p1, p2 = [(p or nullid) for p in parents]
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        self._files = sorted(set(files))
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # a non-callable store is wrapped in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing improves performance for e.g. vcs convert scenarios
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""
        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = p2node = nullid
            fparents = pctx[f].parents() # if file isn't in pctx, check p2?
            if fparents:
                p1node = fparents[0].filenode()
                if len(fparents) > 1:
                    p2node = fparents[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" cannot detect a second parent:
        # self._parents is always a list of length 2, so compare the
        # node against nullid instead.
        if p2.node() != nullid:
            man2 = p2.manifest()
            def managing(f):
                return f in man1 or f in man2
        else:
            def managing(f):
                return f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2362
2364
class memfilectx(committablefilectx):
    """An in-memory file to be committed.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        flags = ''
        if islink:
            flags += 'l'
        if isexec:
            flags += 'x'
        self._flags = flags
        if copied:
            self._copied = (copied, nullid)
        else:
            self._copied = None

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._data = data
2395
2397
class overlayfilectx(committablefilectx):
    """Wrap an existing filectx, overriding selected pieces of it.

    Useful when fctx.data() is expensive (i.e. the flag processor is
    expensive) and raw data, flags, and filenode can be reused directly
    (ex. rebase or a mode-only amend of a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function overriding data (file content); a
        function so it can stay lazy. path, flags, copied, ctx: None or
        an overridden value.

        copied may be (path, rev) or False; a bare path is converted to
        (path, nullid), which simplifies some callers.
        """
        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # rawdata, rawflags and filenode may be reused only when nothing
        # that affects them is overridden: data, copied (could affect
        # data) and ctx (could affect filelog parents).
        # repo._filecommit should double-check filelog parents anyway.
        #
        # path and flags are hashed in the manifestlog, not the filelog,
        # so they do not matter for reusability.
        #
        # Overriding ctx or copied with a value equal to originalfctx's
        # still counts as reusable. originalfctx.renamed() may be a bit
        # expensive, so it is only called when necessary, and datafunc
        # (assumed always expensive) is never called for this test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # carry over cached fields from originalfctx
            for attr_ in ('rawdata', 'rawflags', '_filenode', '_filerev'):
                if util.safehasattr(originalfctx, attr_):
                    setattr(self, attr_, getattr(originalfctx, attr_))

    def data(self):
        return self._datafunc()
2466
2468
2467 class metadataonlyctx(committablectx):
2469 class metadataonlyctx(committablectx):
2468 """Like memctx but it's reusing the manifest of different commit.
2470 """Like memctx but it's reusing the manifest of different commit.
2469 Intended to be used by lightweight operations that are creating
2471 Intended to be used by lightweight operations that are creating
2470 metadata-only changes.
2472 metadata-only changes.
2471
2473
2472 Revision information is supplied at initialization time. 'repo' is the
2474 Revision information is supplied at initialization time. 'repo' is the
2473 current localrepo, 'ctx' is original revision which manifest we're reuisng
2475 current localrepo, 'ctx' is original revision which manifest we're reuisng
2474 'parents' is a sequence of two parent revisions identifiers (pass None for
2476 'parents' is a sequence of two parent revisions identifiers (pass None for
2475 every missing parent), 'text' is the commit.
2477 every missing parent), 'text' is the commit.
2476
2478
2477 user receives the committer name and defaults to current repository
2479 user receives the committer name and defaults to current repository
2478 username, date is the commit date in any format supported by
2480 username, date is the commit date in any format supported by
2479 util.parsedate() and defaults to current date, extra is a dictionary of
2481 util.parsedate() and defaults to current date, extra is a dictionary of
2480 metadata or is left empty.
2482 metadata or is left empty.
2481 """
2483 """
2482 def __new__(cls, repo, originalctx, *args, **kwargs):
2484 def __new__(cls, repo, originalctx, *args, **kwargs):
2483 return super(metadataonlyctx, cls).__new__(cls, repo)
2485 return super(metadataonlyctx, cls).__new__(cls, repo)
2484
2486
    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        """Build a metadata-only context from ``originalctx``.

        The manifest node of ``originalctx`` is reused verbatim; only the
        changeset metadata (text, user, date, extra, parents) may differ.
        Unspecified ``text``/``parents`` fall back to the original's values.
        ``editor``, when given, is called to (re)write the commit message.
        """
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        # not committed yet: no revision number or node assigned
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        # reuse the original manifest node wholesale
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        # always keep exactly two parents, padding with the null changeset
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents
        mp1, mp2 = self.manifestctx().parents
        if p1 != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2 != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)
2519
2521
    def manifestnode(self):
        """Return the manifest node reused from the original context."""
        return self._manifestnode
2522
2524
    @property
    def _manifestctx(self):
        # look up the manifest context by the node captured at __init__ time
        return self._repo.manifestlog[self._manifestnode]
2526
2528
    def filectx(self, path, filelog=None):
        """Delegate file context lookup to the original changectx."""
        return self._originalctx.filectx(path, filelog=filelog)
2529
2531
    def commit(self):
        """commit context to the repo (via repo.commitctx)"""
        return self._repo.commitctx(self)
2533
2535
    @property
    def _manifest(self):
        # reuse the original context's manifest object wholesale
        return self._originalctx.manifest()
2537
2539
2538 @propertycache
2540 @propertycache
2539 def _status(self):
2541 def _status(self):
2540 """Calculate exact status from ``files`` specified in the ``origctx``
2542 """Calculate exact status from ``files`` specified in the ``origctx``
2541 and parents manifests.
2543 and parents manifests.
2542 """
2544 """
2543 man1 = self.p1().manifest()
2545 man1 = self.p1().manifest()
2544 p2 = self._parents[1]
2546 p2 = self._parents[1]
2545 # "1 < len(self._parents)" can't be used for checking
2547 # "1 < len(self._parents)" can't be used for checking
2546 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2548 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2547 # explicitly initialized by the list, of which length is 2.
2549 # explicitly initialized by the list, of which length is 2.
2548 if p2.node() != nullid:
2550 if p2.node() != nullid:
2549 man2 = p2.manifest()
2551 man2 = p2.manifest()
2550 managing = lambda f: f in man1 or f in man2
2552 managing = lambda f: f in man1 or f in man2
2551 else:
2553 else:
2552 managing = lambda f: f in man1
2554 managing = lambda f: f in man1
2553
2555
2554 modified, added, removed = [], [], []
2556 modified, added, removed = [], [], []
2555 for f in self._files:
2557 for f in self._files:
2556 if not managing(f):
2558 if not managing(f):
2557 added.append(f)
2559 added.append(f)
2558 elif f in self:
2560 elif f in self:
2559 modified.append(f)
2561 modified.append(f)
2560 else:
2562 else:
2561 removed.append(f)
2563 removed.append(f)
2562
2564
2563 return scmutil.status(modified, added, removed, [], [], [], [])
2565 return scmutil.status(modified, added, removed, [], [], [], [])
2564
2566
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        """Return True if this file's content differs from ``fctx``'s."""
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        # arbitrary on-disk files carry no exec/symlink flags
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags):
        assert not flags
        # Write in binary mode: data() and decodeddata() read the file as
        # binary, so a text-mode handle would translate newlines (and
        # reject bytes entirely on Python 3).
        with open(self._path, "wb") as f:
            f.write(data)
@@ -1,643 +1,644
1 # hgweb/webutil.py - utility library for the web interface.
1 # hgweb/webutil.py - utility library for the web interface.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import difflib
12 import difflib
13 import os
13 import os
14 import re
14 import re
15
15
16 from ..i18n import _
16 from ..i18n import _
17 from ..node import hex, nullid, short
17 from ..node import hex, nullid, short
18
18
19 from .common import (
19 from .common import (
20 ErrorResponse,
20 ErrorResponse,
21 HTTP_BAD_REQUEST,
21 HTTP_BAD_REQUEST,
22 HTTP_NOT_FOUND,
22 HTTP_NOT_FOUND,
23 paritygen,
23 paritygen,
24 )
24 )
25
25
26 from .. import (
26 from .. import (
27 context,
27 context,
28 error,
28 error,
29 match,
29 match,
30 mdiff,
30 mdiff,
31 patch,
31 patch,
32 pathutil,
32 pathutil,
33 pycompat,
33 pycompat,
34 templatefilters,
34 templatefilters,
35 ui as uimod,
35 ui as uimod,
36 util,
36 util,
37 )
37 )
38
38
def up(p):
    """Return the parent directory of path ``p`` with a trailing slash.

    The input is normalized to start with "/" and to carry no trailing
    slash before the parent is computed; the repository root maps to "/".
    """
    normalized = p
    if normalized[0] != "/":
        normalized = "/" + normalized
    if normalized[-1] == "/":
        normalized = normalized[:-1]
    parent = os.path.dirname(normalized)
    return "/" if parent == "/" else parent + "/"
48
48
49 def _navseq(step, firststep=None):
49 def _navseq(step, firststep=None):
50 if firststep:
50 if firststep:
51 yield firststep
51 yield firststep
52 if firststep >= 20 and firststep <= 40:
52 if firststep >= 20 and firststep <= 40:
53 firststep = 50
53 firststep = 50
54 yield firststep
54 yield firststep
55 assert step > 0
55 assert step > 0
56 assert firststep > 0
56 assert firststep > 0
57 while step <= firststep:
57 while step <= firststep:
58 step *= 10
58 step *= 10
59 while True:
59 while True:
60 yield 1 * step
60 yield 1 * step
61 yield 3 * step
61 yield 3 * step
62 step *= 10
62 step *= 10
63
63
class revnav(object):
    """Generator of changelog navigation links ("-1000", "+30", "tip"...)."""

    def __init__(self, repo):
        """Navigation generation object

        :repo: repo object we generate nav for
        """
        # used for hex generation
        self._revlog = repo.changelog

    def __nonzero__(self):
        """return True if any revision to navigate over"""
        return self._first() is not None

    # Python 3 spells the truth-value hook __bool__
    __bool__ = __nonzero__

    def _first(self):
        """return the minimum non-filtered changeset or None"""
        try:
            return next(iter(self._revlog))
        except StopIteration:
            # completely empty (or fully filtered) revlog
            return None

    def hex(self, rev):
        # map a revision number to its full hex node id
        return hex(self._revlog.node(rev))

    def gen(self, pos, pagelen, limit):
        """computes label and revision id for navigation link

        :pos: is the revision relative to which we generate navigation.
        :pagelen: the size of each navigation page
        :limit: how far shall we link

        The return is:
            - a single element tuple
            - containing a dictionary with a `before` and `after` key
            - values are generator functions taking arbitrary number of kwargs
            - yield items are dictionaries with `label` and `node` keys
        """
        if not self:
            # empty repo
            return ({'before': (), 'after': ()},)

        # candidate revisions at growing distances on both sides of pos
        targets = []
        for f in _navseq(1, pagelen):
            if f > limit:
                break
            targets.append(pos + f)
            targets.append(pos - f)
        targets.sort()

        first = self._first()
        navbefore = [("(%i)" % first, self.hex(first))]
        navafter = []
        for rev in targets:
            if rev not in self._revlog:
                # skip filtered/nonexistent revisions
                continue
            if pos < rev < limit:
                navafter.append(("+%d" % abs(rev - pos), self.hex(rev)))
            if 0 < rev < pos:
                navbefore.append(("-%d" % abs(rev - pos), self.hex(rev)))


        navafter.append(("tip", "tip"))

        # lazily turn (label, node) pairs into template dicts
        data = lambda i: {"label": i[0], "node": i[1]}
        return ({'before': lambda **map: (data(i) for i in navbefore),
                 'after': lambda **map: (data(i) for i in navafter)},)
132
132
class filerevnav(revnav):
    """revnav variant operating on a filelog instead of the changelog."""

    def __init__(self, repo, path):
        """Navigation generation object

        :repo: repo object we generate nav for
        :path: path of the file we generate nav for
        """
        # NOTE: deliberately does not call revnav.__init__; it sets up its
        # own revlog (the filelog) plus the changelog used by hex() below.
        # used for iteration
        self._changelog = repo.unfiltered().changelog
        # used for hex generation
        self._revlog = repo.file(path)

    def hex(self, rev):
        # translate a file revision into the hex node of the changeset
        # that introduced it (via linkrev)
        return hex(self._changelog.node(self._revlog.linkrev(rev)))
148
148
class _siblings(object):
    """Iterable wrapper exposing sibling changesets to templates.

    Null siblings are dropped, and a lone sibling whose revision equals
    ``hiderev`` is suppressed entirely.
    """
    def __init__(self, siblings=None, hiderev=None):
        candidates = [] if siblings is None else siblings
        kept = [s for s in candidates if s.node() != nullid]
        if len(kept) == 1 and kept[0].rev() == hiderev:
            kept = []
        self.siblings = kept

    def __iter__(self):
        for s in self.siblings:
            entry = {
                'node': s.hex(),
                'rev': s.rev(),
                'user': s.user(),
                'date': s.date(),
                'description': s.description(),
                'branch': s.branch(),
            }
            # file contexts additionally expose their path
            if util.safehasattr(s, 'path'):
                entry['file'] = s.path()
            yield entry

    def __len__(self):
        return len(self.siblings)
173
173
def difffeatureopts(req, ui, section):
    """Build diff options for *section*, honoring whitespace flags passed
    as request parameters on top of the (untrusted) ui configuration."""
    opts = patch.difffeatureopts(ui, untrusted=True,
                                 section=section, whitespace=True)

    whitespaceflags = ('ignorews', 'ignorewsamount', 'ignorewseol',
                       'ignoreblanklines')
    for flag in whitespaceflags:
        rawvalue = req.form.get(flag, [None])[0]
        if rawvalue is None:
            continue
        parsed = util.parsebool(rawvalue)
        # an unparsable value counts as enabling the flag
        setattr(opts, flag, True if parsed is None else parsed)

    return opts
185
185
def annotate(req, fctx, ui):
    """Annotate ``fctx`` with copy-following and line numbers, using the
    'annotate' section's diff feature options."""
    opts = difffeatureopts(req, ui, 'annotate')
    return fctx.annotate(follow=True, linenumber=True, diffopts=opts)
189
189
def parents(ctx, hide=None):
    """Return a _siblings wrapper over the parents of ``ctx``.

    For a file context whose linkrev changeset differs from the changeset
    being displayed, the introducing changeset is used instead of the
    literal parents.
    """
    if isinstance(ctx, context.basefilectx):
        intro = ctx.introrev()
        if intro != ctx.changectx().rev():
            return _siblings([ctx.repo()[intro]], hide)
    return _siblings(ctx.parents(), hide)
196
196
def children(ctx, hide=None):
    """Return a _siblings wrapper over the children of ``ctx``."""
    kids = ctx.children()
    return _siblings(kids, hide)
199
199
def renamelink(fctx):
    """Return a one-element list describing the rename source of ``fctx``,
    or an empty list if the file was not renamed."""
    rename = fctx.renamed()
    if not rename:
        return []
    return [{'file': rename[0], 'node': hex(rename[1])}]
205
205
def nodetagsdict(repo, node):
    """Return template dicts for every tag attached to ``node``."""
    tags = []
    for name in repo.nodetags(node):
        tags.append({"name": name})
    return tags
208
208
def nodebookmarksdict(repo, node):
    """Return template dicts for every bookmark attached to ``node``."""
    marks = []
    for name in repo.nodebookmarks(node):
        marks.append({"name": name})
    return marks
211
211
def nodebranchdict(repo, ctx):
    """Return [{'name': branch}] when ``ctx`` is its branch's tip, else []."""
    branch = ctx.branch()
    # If this is an empty repo, ctx.node() == nullid and
    # ctx.branch() == 'default'; branchtip() may then fail.
    try:
        tipnode = repo.branchtip(branch)
    except error.RepoLookupError:
        tipnode = None
    if tipnode != ctx.node():
        return []
    return [{"name": branch}]
224
224
def nodeinbranch(repo, ctx):
    """Return [{'name': branch}] for a non-default branch whose tip is not
    ``ctx``; otherwise an empty list."""
    branch = ctx.branch()
    try:
        tipnode = repo.branchtip(branch)
    except error.RepoLookupError:
        tipnode = None
    # De Morgan of the original: hide the label on the default branch and
    # on the branch tip itself
    if branch == 'default' or tipnode == ctx.node():
        return []
    return [{"name": branch}]
235
235
def nodebranchnodefault(ctx):
    """Return [{'name': branch}] unless ``ctx`` is on the default branch."""
    branch = ctx.branch()
    return [] if branch == 'default' else [{"name": branch}]
242
242
def showtag(repo, tmpl, t1, node=nullid, **args):
    """Yield one rendered ``t1`` template per tag attached to ``node``."""
    for tagname in repo.nodetags(node):
        yield tmpl(t1, tag=tagname, **args)
246
246
def showbookmark(repo, tmpl, t1, node=nullid, **args):
    """Yield one rendered ``t1`` template per bookmark attached to ``node``."""
    for mark in repo.nodebookmarks(node):
        yield tmpl(t1, bookmark=mark, **args)
250
250
def branchentries(repo, stripecount, limit=0):
    """Return a template generator yielding one entry per branch tip.

    Branches are sorted open-before-closed, then by descending tip
    revision; ``limit`` (when > 0) caps the number of entries yielded.
    """
    tips = []
    heads = repo.heads()
    parity = paritygen(stripecount)
    # sort open branches first, then by tip revision (descending via
    # reverse=True below)
    sortkey = lambda item: (not item[1], item[0].rev())

    def entries(**map):
        count = 0
        # lazily populate the closed-over tips list on first invocation
        if not tips:
            for tag, hs, tip, closed in repo.branchmap().iterbranches():
                tips.append((repo[tip], closed))
        for ctx, closed in sorted(tips, key=sortkey, reverse=True):
            if limit > 0 and count >= limit:
                return
            count += 1
            if closed:
                status = 'closed'
            elif ctx.node() not in heads:
                status = 'inactive'
            else:
                status = 'open'
            yield {
                'parity': next(parity),
                'branch': ctx.branch(),
                'status': status,
                'node': ctx.hex(),
                'date': ctx.date()
            }

    return entries
281
281
def cleanpath(repo, path):
    """Strip leading slashes and canonicalize ``path`` against the repo
    root (rejecting escapes from the repository)."""
    stripped = path.lstrip('/')
    return pathutil.canonpath(repo.root, '', stripped)
285
285
def changeidctx(repo, changeid):
    """Resolve ``changeid`` to a change context.

    Tries a regular changeset lookup first; on failure, treats the id as a
    manifest node and maps it back (via linkrev) to the changeset that
    introduced that manifest.
    """
    try:
        ctx = repo[changeid]
    except error.RepoError:
        man = repo.manifestlog._revlog
        ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]

    return ctx
294
294
def changectx(repo, req):
    """Resolve the change context named by the request ('node' or
    'manifest' parameter), defaulting to "tip"."""
    form = req.form
    if 'node' in form:
        changeid = form['node'][0]
        # for "a:b" revision specs, the displayed changeset is the
        # right-hand side
        sep = changeid.find(':')
        if sep != -1:
            changeid = changeid[sep + 1:]
    elif 'manifest' in form:
        changeid = form['manifest'][0]
    else:
        changeid = "tip"

    return changeidctx(repo, changeid)
306
306
def basechangectx(repo, req):
    """Return the base change context of an "a:b" 'node' request parameter
    (the left-hand side), or None when no explicit base was requested."""
    if 'node' in req.form:
        changeid = req.form['node'][0]
        sep = changeid.find(':')
        if sep != -1:
            # only an explicit "base:node" spec yields a base context
            return changeidctx(repo, changeid[:sep])

    return None
316
316
def filectx(repo, req):
    """Resolve the file context named by the request's 'file' plus
    'node'/'filenode' parameters, raising 404 when either is missing."""
    form = req.form
    if 'file' not in form:
        raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
    path = cleanpath(repo, form['file'][0])
    if 'node' in form:
        changeid = form['node'][0]
    elif 'filenode' in form:
        changeid = form['filenode'][0]
    else:
        raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
    # prefer resolving as a changeset id; fall back to a file revision id
    try:
        return repo[changeid][path]
    except error.RepoError:
        return repo.filectx(path, fileid=changeid)
333
333
def linerange(req):
    """Parse the optional 'linerange' request parameter.

    Returns None when absent, otherwise the processed (fromline, toline)
    range; malformed or duplicated parameters raise a 400 response.
    """
    values = req.form.get('linerange')
    if values is None:
        return None
    if len(values) > 1:
        raise ErrorResponse(HTTP_BAD_REQUEST,
                            'redundant linerange parameter')
    parts = values[0].split(':', 1)
    try:
        # a missing ':' yields a single element and fails the unpack below
        fromline, toline = [int(part) for part in parts]
    except ValueError:
        raise ErrorResponse(HTTP_BAD_REQUEST,
                            'invalid linerange parameter')
    try:
        return util.processlinerange(fromline, toline)
    except error.ParseError as exc:
        raise ErrorResponse(HTTP_BAD_REQUEST, str(exc))
350
350
def formatlinerange(fromline, toline):
    """Render a 0-based, half-open line range as a 1-based 'start:end'."""
    start = fromline + 1
    return '%d:%d' % (start, toline)
353
353
def commonentry(repo, ctx):
    """Return the template keywords shared by all changeset-like pages."""
    node = ctx.node()
    # raw changeset metadata first...
    entry = {
        'rev': ctx.rev(),
        'node': hex(node),
        'author': ctx.user(),
        'desc': ctx.description(),
        'date': ctx.date(),
        'extra': ctx.extra(),
        'phase': ctx.phasestr(),
        'obsolete': ctx.obsolete(),
    }
    # ...then derived branch/tag/bookmark views and lazy relatives
    entry['branch'] = nodebranchnodefault(ctx)
    entry['inbranch'] = nodeinbranch(repo, ctx)
    entry['branches'] = nodebranchdict(repo, ctx)
    entry['tags'] = nodetagsdict(repo, node)
    entry['bookmarks'] = nodebookmarksdict(repo, node)
    entry['parent'] = lambda **x: parents(ctx)
    entry['child'] = lambda **x: children(ctx)
    return entry
372
373
def changelistentry(web, ctx, tmpl):
    '''Obtain a dictionary to be used for entries in a changelist.

    This function is called when producing items for the "entries" list passed
    to the "shortlog" and "changelog" templates.
    '''
    repo = web.repo
    rev = ctx.rev()
    node = ctx.node()

    entry = commonentry(repo, ctx)
    # changelist pages hide the trivial linear parent/child and add
    # per-entry tag and file listings
    entry['allparents'] = lambda **x: parents(ctx)
    entry['parent'] = lambda **x: parents(ctx, rev - 1)
    entry['child'] = lambda **x: children(ctx, rev + 1)
    entry['changelogtag'] = showtag(repo, tmpl, 'changelogtag', node)
    entry['files'] = listfilediffs(tmpl, ctx.files(), node, web.maxfiles)
    return entry
394
395
def symrevorshortnode(req, ctx):
    """Return the symbolic revision from the request when present
    (escaped), otherwise the short node of ``ctx``."""
    if 'node' in req.form:
        return templatefilters.revescape(req.form['node'][0])
    return short(ctx.node())
400
401
def changesetentry(web, req, tmpl, ctx):
    '''Obtain a dictionary to be used to render the "changeset" template.'''

    showtags = showtag(web.repo, tmpl, 'changesettag', ctx.node())
    showbookmarks = showbookmark(web.repo, tmpl, 'changesetbookmark',
                                 ctx.node())
    showbranch = nodebranchnodefault(ctx)

    # one link per touched file, alternating row parity
    files = []
    parity = paritygen(web.stripecount)
    for blockno, f in enumerate(ctx.files()):
        # old-style ternary: removed files (not in ctx) get no link
        template = f in ctx and 'filenodelink' or 'filenolink'
        files.append(tmpl(template,
                          node=ctx.hex(), file=f, blockno=blockno + 1,
                          parity=next(parity)))

    # diff base: explicit "base:node" request parameter, else first parent
    basectx = basechangectx(web.repo, req)
    if basectx is None:
        basectx = ctx.p1()

    style = web.config('web', 'style')
    if 'style' in req.form:
        style = req.form['style'][0]

    diff = diffs(web, tmpl, ctx, basectx, None, style)

    # fresh parity generator so diffstat rows restripe from zero
    parity = paritygen(web.stripecount)
    diffstatsgen = diffstatgen(ctx, basectx)
    diffstats = diffstat(tmpl, ctx, diffstatsgen, parity)

    return dict(
        diff=diff,
        symrev=symrevorshortnode(req, ctx),
        basenode=basectx.hex(),
        changesettag=showtags,
        changesetbookmark=showbookmarks,
        changesetbranch=showbranch,
        files=files,
        diffsummary=lambda **x: diffsummary(diffstatsgen),
        diffstat=diffstats,
        archives=web.archivelist(ctx.hex()),
        **commonentry(web.repo, ctx))
443
444
def listfilediffs(tmpl, files, node, max):
    """Yield 'filedifflink' entries for at most ``max`` files, followed by
    a 'fileellipses' marker when the list was truncated."""
    for filename in files[:max]:
        yield tmpl('filedifflink', node=hex(node), file=filename)
    if len(files) > max:
        yield tmpl('fileellipses')
449
450
def diffs(web, tmpl, ctx, basectx, files, style, linerange=None,
          lineidprefix=''):
    """Yield 'diffblock' templates for the diff between basectx and ctx.

    files: restrict the diff to exactly these paths; an empty/None value
        diffs everything.
    style: 'raw' keeps the full diff header; any other style drops the
        first header line.
    linerange: optional range filter; hunks whose ctx-side span does not
        overlap it are skipped.
    lineidprefix: prefix for the per-line anchor ids (used when several
        diffs appear on one page).
    """

    def prettyprintlines(lines, blockno):
        # Classify each diff line by its leading character so templates
        # can style additions, removals and hunk headers differently.
        for lineno, l in enumerate(lines, 1):
            # "blockno.lineno" makes line ids unique across file blocks.
            difflineno = "%d.%d" % (blockno, lineno)
            if l.startswith('+'):
                ltype = "difflineplus"
            elif l.startswith('-'):
                ltype = "difflineminus"
            elif l.startswith('@'):
                ltype = "difflineat"
            else:
                ltype = "diffline"
            yield tmpl(ltype,
                       line=l,
                       lineno=lineno,
                       lineid=lineidprefix + "l%s" % difflineno,
                       linenumber="% 8s" % difflineno)

    repo = web.repo
    if files:
        m = match.exact(repo.root, repo.getcwd(), files)
    else:
        m = match.always(repo.root, repo.getcwd())

    diffopts = patch.diffopts(repo.ui, untrusted=True)
    node1 = basectx.node()
    node2 = ctx.node()
    parity = paritygen(web.stripecount)

    diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts)
    for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
        if style != 'raw':
            # Drop the first header line for rendered (non-raw) styles.
            header = header[1:]
        lines = [h + '\n' for h in header]
        for hunkrange, hunklines in hunks:
            if linerange is not None and hunkrange is not None:
                s1, l1, s2, l2 = hunkrange
                # Only the ctx-side span (s2, l2) is matched against the
                # requested line range.
                if not mdiff.hunkinrange((s2, l2), linerange):
                    continue
            lines.extend(hunklines)
        if lines:
            yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                       lines=prettyprintlines(lines, blockno))
def compare(tmpl, context, leftlines, rightlines):
    '''Generator function that provides side-by-side comparison data.'''

    def compline(kind, leftlineno, leftline, rightlineno, rightline):
        # Build the anchor id from whichever side(s) carry a line number.
        lineid = ("l%s" % leftlineno) if leftlineno else ''
        if rightlineno:
            lineid += "r%s" % rightlineno
        return tmpl('comparisonline',
                    type=kind,
                    lineid=lineid,
                    leftlineno=leftlineno,
                    leftlinenumber="% 6s" % (leftlineno or ''),
                    leftline=leftline or '',
                    rightlineno=rightlineno,
                    rightlinenumber="% 6s" % (rightlineno or ''),
                    rightline=rightline or '')

    def getblock(opcodes):
        for kind, llo, lhi, rlo, rhi in opcodes:
            nleft = lhi - llo
            nright = rhi - rlo
            paired = min(nleft, nright)
            # First emit rows where both sides have a line...
            for offset in xrange(paired):
                yield compline(kind,
                               leftlineno=llo + offset + 1,
                               leftline=leftlines[llo + offset],
                               rightlineno=rlo + offset + 1,
                               rightline=rightlines[rlo + offset])
            # ...then whichever side has surplus lines.  At most one of
            # these ranges is non-empty for a given opcode.
            for i in xrange(llo + paired, lhi):
                yield compline(kind,
                               leftlineno=i + 1,
                               leftline=leftlines[i],
                               rightlineno=None,
                               rightline=None)
            for i in xrange(rlo + paired, rhi):
                yield compline(kind,
                               leftlineno=None,
                               leftline=None,
                               rightlineno=i + 1,
                               rightline=rightlines[i])

    matcher = difflib.SequenceMatcher(None, leftlines, rightlines)
    if context < 0:
        # A negative context renders the whole comparison as one block.
        yield tmpl('comparisonblock', lines=getblock(matcher.get_opcodes()))
    else:
        for opcodes in matcher.get_grouped_opcodes(n=context):
            yield tmpl('comparisonblock', lines=getblock(opcodes))
def diffstatgen(ctx, basectx):
    '''Generator function that provides the diffstat data.'''

    # Compute the per-file statistics once, then hand the same tuple out
    # on every next() call so multiple consumers share one computation.
    stats = patch.diffstatdata(util.iterlines(ctx.diff(basectx)))
    maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats)
    result = (stats, maxname, maxtotal, addtotal, removetotal, binary)
    while True:
        yield result
def diffsummary(statgen):
    '''Return a short summary of the diff.'''

    data = next(statgen)
    stats, addtotal, removetotal = data[0], data[3], data[4]
    message = _(' %d files changed, %d insertions(+), %d deletions(-)\n')
    return message % (len(stats), addtotal, removetotal)
def diffstat(tmpl, ctx, statgen, parity):
    '''Return a diffstat template for each file in the diff.'''

    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
    changedfiles = ctx.files()

    def pct(value):
        # Guard against division by zero when the diff is empty.
        if maxtotal == 0:
            return 0
        return (float(value) / maxtotal) * 100

    for fileno, (filename, adds, removes, isbinary) in enumerate(stats, 1):
        # Files actually touched by ctx get a clickable entry.
        template = ('diffstatlink' if filename in changedfiles
                    else 'diffstatnolink')
        yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno,
                   total=adds + removes, addpct=pct(adds),
                   removepct=pct(removes), parity=next(parity))
class sessionvars(object):
    """Mapping of query-string variables used when generating URLs.

    Iteration yields one dict per variable, carrying the separator that
    must precede it in a URL: the configured start character ('?' by
    default) for the first variable, '&' for every following one.
    """
    def __init__(self, vars, start='?'):
        self.start = start
        self.vars = vars

    def __getitem__(self, key):
        return self.vars[key]

    def __setitem__(self, key, value):
        self.vars[key] = value

    def __copy__(self):
        # Shallow-copy the underlying mapping so the copy can be
        # modified without affecting the original.
        return sessionvars(copy.copy(self.vars), self.start)

    def __iter__(self):
        for i, (key, value) in enumerate(sorted(self.vars.iteritems())):
            yield {'name': key,
                   'value': pycompat.bytestr(value),
                   'separator': self.start if i == 0 else '&',
                   }
class wsgiui(uimod.ui):
    """ui subclass for use inside a WSGI server."""
    # default termwidth breaks under mod_wsgi
    def termwidth(self):
        """Return a fixed 80-column width instead of probing a terminal."""
        return 80
def getwebsubs(repo):
    """Build the websub substitution table from repository config.

    Each ``[websub]`` (or legacy ``[interhg]``) entry holds a sed-style
    pattern ``s<delim>regexp<delim>format<delim>flags``.  Returns a list
    of ``(compiled_regexp, format)`` pairs; invalid entries are skipped
    with a warning on repo.ui.
    """
    websubtable = []
    websubdefs = repo.ui.configitems('websub')
    # we must maintain interhg backwards compatibility
    websubdefs += repo.ui.configitems('interhg')
    for key, pattern in websubdefs:
        # grab the delimiter from the character after the "s"
        unesc = pattern[1]
        delim = re.escape(unesc)

        # identify portions of the pattern, taking care to avoid escaped
        # delimiters. the replace format and flags are optional, but
        # delimiters are required.
        #
        # NOTE: the flag portion must be written as ([ilmsux]*) so the
        # group captures ALL trailing flags; the repeated-group form
        # ([ilmsux])* would retain only the last flag matched, silently
        # dropping the others (e.g. 'i' in a trailing 'is').
        match = re.match(
            r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux]*)$'
            % (delim, delim, delim), pattern)
        if not match:
            repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
                         % (key, pattern))
            continue

        # we need to unescape the delimiter for regexp and format
        delim_re = re.compile(r'(?<!\\)\\%s' % delim)
        regexp = delim_re.sub(unesc, match.group(1))
        format = delim_re.sub(unesc, match.group(2))

        # the pattern allows for 6 regexp flags, so set them if necessary
        flagin = match.group(3)
        flags = 0
        if flagin:
            for flag in flagin.upper():
                flags |= re.__dict__[flag]

        try:
            regexp = re.compile(regexp, flags)
            websubtable.append((regexp, format))
        except re.error:
            # the regexp portion failed to compile; report and move on
            repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
                         % (key, regexp))
    return websubtable
General Comments 0
You need to be logged in to leave comments. Login now