##// END OF EJS Templates
context: add instabilities() method to basefilectx...
av6 -
r35092:bd274393 default
parent child Browse files
Show More
@@ -1,2606 +1,2608
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from .thirdparty import (
29 from .thirdparty import (
30 attr,
30 attr,
31 )
31 )
32 from . import (
32 from . import (
33 encoding,
33 encoding,
34 error,
34 error,
35 fileset,
35 fileset,
36 match as matchmod,
36 match as matchmod,
37 mdiff,
37 mdiff,
38 obsolete as obsmod,
38 obsolete as obsmod,
39 patch,
39 patch,
40 pathutil,
40 pathutil,
41 phases,
41 phases,
42 pycompat,
42 pycompat,
43 repoview,
43 repoview,
44 revlog,
44 revlog,
45 scmutil,
45 scmutil,
46 sparse,
46 sparse,
47 subrepo,
47 subrepo,
48 util,
48 util,
49 )
49 )
50
50
51 propertycache = util.propertycache
51 propertycache = util.propertycache
52
52
53 nonascii = re.compile(r'[^\x21-\x7f]').search
53 nonascii = re.compile(r'[^\x21-\x7f]').search
54
54
class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        # passing a context through is a no-op copy: hand back the same
        # object rather than building a fresh one
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        # defaults; subclasses overwrite these during __init__
        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __bytes__(self):
        # short (12-hex-digit) form of the node hash
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # equal only when both the concrete type and the revision match;
        # any missing attribute (uninitialized context) compares unequal
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        # membership is file-path membership in this context's manifest
        return key in self._manifest

    def __getitem__(self, key):
        # indexing by path yields a file context for that path
        return self.filectx(key)

    def __iter__(self):
        # iterating a context iterates the file paths in its manifest
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        # manifest diff maps fn -> ((node1, flag1), (node2, flag2)),
        # with a None value meaning "unchanged" (only when clean=True)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirnodes:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        # cached mapping of subrepo path -> (source, revision, kind)
        # as parsed from .hgsub/.hgsubstate -- see subrepo.state
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        """Return the recorded revision of the subrepo at subpath."""
        return self.substate[subpath][1]

    def rev(self):
        """Return the integer revision number of this context."""
        return self._rev
    def node(self):
        """Return the binary node id of this context."""
        return self._node
    def hex(self):
        """Return the full hexadecimal node id."""
        return hex(self.node())
    def manifest(self):
        """Return the manifest (path -> node mapping) for this context."""
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        """Return the phase of this changeset as a string (e.g. 'draft')."""
        return phases.phasenames[self.phase()]
    def mutable(self):
        """True if the changeset is in a non-public phase."""
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        # deprecated alias kept for extension compatibility
        msg = ("'context.unstable' is deprecated, "
               "use 'context.orphan'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.orphan()

    def orphan(self):
        """True if the changeset is not obsolete but its ancestors are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def bumped(self):
        # deprecated alias kept for extension compatibility
        msg = ("'context.bumped' is deprecated, "
               "use 'context.phasedivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.phasedivergent()

    def phasedivergent(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def divergent(self):
        # deprecated alias kept for extension compatibility
        msg = ("'context.divergent' is deprecated, "
               "use 'context.contentdivergent'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.contentdivergent()

    def contentdivergent(self):
        """Is a successor of a changeset with multiple possible successors set

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def troubled(self):
        # deprecated alias kept for extension compatibility
        msg = ("'context.troubled' is deprecated, "
               "use 'context.isunstable'")
        self._repo.ui.deprecwarn(msg, '4.4')
        return self.isunstable()

    def isunstable(self):
        """True if the changeset is either orphan, phase-divergent or
        content-divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def troubles(self):
        """Keep the old version around in order to avoid breaking extensions
        about different return values.
        """
        msg = ("'context.troubles' is deprecated, "
               "use 'context.instabilities'")
        self._repo.ui.deprecwarn(msg, '4.4')

        # note: returns the legacy names ('bumped', 'divergent'), unlike
        # instabilities() which returns the modern names
        troubles = []
        if self.orphan():
            troubles.append('orphan')
        if self.phasedivergent():
            troubles.append('bumped')
        if self.contentdivergent():
            troubles.append('divergent')
        return troubles

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append('orphan')
        if self.phasedivergent():
            instabilities.append('phase-divergent')
        if self.contentdivergent():
            instabilities.append('content-divergent')
        return instabilities

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        """Return the first parent context."""
        return self._parents[0]

    def p2(self):
        """Return the second parent context, or the null context if there
        is only one parent."""
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        """Return (filenode, flags) for path, raising ManifestLookupError
        if the path is not present in this context's manifest."""
        # fast path: full manifest already loaded
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        # next: a cached manifest delta may already contain the entry
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        # slow path: targeted lookup through the manifest log
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        # missing files are reported as having no flags rather than raising
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        """Return a matcher for this context's repo built from the given
        patterns and include/exclude lists."""
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        for l in r:
            l.sort()

        return r
429
429
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    filtername = repo.filtername
    if not filtername.startswith('visible'):
        # generic repoview filter: name the subset the revision is not in
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, filtername)
        return error.FilteredRepoLookupError(msg)
    # visibility filter: the revision exists but is hidden by obsolescence
    hiddenmsg = _("hidden revision '%s'") % changeid
    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(hiddenmsg, hint=hint)
442
442
443 class changectx(basectx):
443 class changectx(basectx):
444 """A changecontext object makes access to data related to a particular
444 """A changecontext object makes access to data related to a particular
445 changeset convenient. It represents a read-only context already present in
445 changeset convenient. It represents a read-only context already present in
446 the repo."""
446 the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        # an empty changeid means the working directory's first parent
        if changeid == '':
            changeid = '.'
        self._repo = repo

        # The cascade below tries progressively more expensive lookups;
        # each successful branch sets self._node and self._rev and returns.
        # Filtered-access errors are re-raised so the outer handler can
        # translate them into a user-facing message.
        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            # Python 2 only: normalize longs to strings for the paths below
            if not pycompat.ispy3 and isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if (changeid == '.'
                or repo.local() and changeid == repo.dirstate.p1()):
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            # a 20-byte string is treated as a binary node id
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            # try to interpret the changeid as a (possibly negative)
            # revision number; '%d' round-trip rejects strings like '07'
            try:
                r = int(changeid)
                if '%d' % r != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l and r != wdirrev:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            # a 40-character string is treated as a full hex node id
            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            # last resort: unambiguous hex-prefix match against all nodes
            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if (repo.local()
                and changeid in repo.unfiltered().dirstate.parents()):
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            # hex-encode binary-looking ids so the error message is printable
            try:
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            raise _filterederror(repo, changeid)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
556
556
557 def __hash__(self):
557 def __hash__(self):
558 try:
558 try:
559 return hash(self._rev)
559 return hash(self._rev)
560 except AttributeError:
560 except AttributeError:
561 return id(self)
561 return id(self)
562
562
    def __nonzero__(self):
        # a changectx is "true" unless it is the null revision
        return self._rev != nullrev

    # Python 3 spelling of the truthiness hook
    __bool__ = __nonzero__
567
567
    @propertycache
    def _changeset(self):
        # parsed changelog entry for this revision (user, date, files, ...)
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        # manifest read as a delta against its parent (cheaper than read())
        return self._manifestctx.readdelta()
583
583
584 @propertycache
584 @propertycache
585 def _parents(self):
585 def _parents(self):
586 repo = self._repo
586 repo = self._repo
587 p1, p2 = repo.changelog.parentrevs(self._rev)
587 p1, p2 = repo.changelog.parentrevs(self._rev)
588 if p2 == nullrev:
588 if p2 == nullrev:
589 return [changectx(repo, p1)]
589 return [changectx(repo, p1)]
590 return [changectx(repo, p1), changectx(repo, p2)]
590 return [changectx(repo, p1), changectx(repo, p2)]
591
591
    def changeset(self):
        """Return the changelog entry as a
        (manifest, user, date, files, description, extra) tuple."""
        c = self._changeset
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra,
        )
    def manifestnode(self):
        # node id of the manifest referenced by this changeset
        return self._changeset.manifest
604
604
    def user(self):
        return self._changeset.user
    def date(self):
        return self._changeset.date
    def files(self):
        return self._changeset.files
    def description(self):
        return self._changeset.description
    def branch(self):
        # the branch name lives in the commit's extra dict; convert from
        # the internal (utf-8) encoding to the local one
        return encoding.tolocal(self._changeset.extra.get("branch"))
    def closesbranch(self):
        # presence of the 'close' key in extra marks a branch-closing commit
        return 'close' in self._changeset.extra
    def extra(self):
        return self._changeset.extra
    def tags(self):
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        # hidden == filtered out of the 'visible' repo view
        return self._rev in repoview.filterrevs(self._repo, 'visible')
627
627
    def isinmemory(self):
        # overridden by in-memory (overlay) context subclasses
        return False

    def children(self):
        """return contexts for each child changeset"""
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        # lazily yield a changectx for every ancestor revision
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        # lazily yield a changectx for every descendant revision
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)
643
643
    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            # default to the file node recorded in this changeset's manifest
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)
650
650
    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor'):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    # ignore unresolvable preferences, try the next one
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                # no configured preference matched a candidate: fall back to
                # the revlog's own ancestor choice
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)
686
686
687 def descendant(self, other):
687 def descendant(self, other):
688 """True if other is descendant of this changeset"""
688 """True if other is descendant of this changeset"""
689 return self._repo.changelog.descendant(self._rev, other._rev)
689 return self._repo.changelog.descendant(self._rev, other._rev)
690
690
    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        # every name produced by walk() is by construction a match
        return self.walk(match)
708
708
709 class basefilectx(object):
709 class basefilectx(object):
710 """A filecontext object represents the common logic for its children:
710 """A filecontext object represents the common logic for its children:
711 filectx: read-only access to a filerevision that is already present
711 filectx: read-only access to a filerevision that is already present
712 in the repo,
712 in the repo,
713 workingfilectx: a filecontext that represents files from the working
713 workingfilectx: a filecontext that represents files from the working
714 directory,
714 directory,
715 memfilectx: a filecontext that represents files in-memory,
715 memfilectx: a filecontext that represents files in-memory,
716 overlayfilectx: duplicate another filecontext with some fields overridden.
716 overlayfilectx: duplicate another filecontext with some fields overridden.
717 """
717 """
    @propertycache
    def _filelog(self):
        # revlog holding all revisions of this file
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        # changelog revision backing this filectx, resolved from whichever
        # hint the constructor stored (explicit id, changectx, or a known
        # descendant revision)
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            # last resort: trust the filelog's recorded linkrev
            return self._filelog.linkrev(self._filerev)
734
734
    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            # resolve whatever id form the caller supplied to a file node
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path
749
749
    def __nonzero__(self):
        # a filectx is "true" when its file revision can be resolved
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    # Python 3 spelling of the truthiness hook
    __bool__ = __nonzero__
759
759
    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            # changeset cannot be resolved; show a placeholder revision
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))
770
770
771 def __hash__(self):
771 def __hash__(self):
772 try:
772 try:
773 return hash((self._path, self._filenode))
773 return hash((self._path, self._filenode))
774 except AttributeError:
774 except AttributeError:
775 return id(self)
775 return id(self)
776
776
777 def __eq__(self, other):
777 def __eq__(self, other):
778 try:
778 try:
779 return (type(self) == type(other) and self._path == other._path
779 return (type(self) == type(other) and self._path == other._path
780 and self._filenode == other._filenode)
780 and self._filenode == other._filenode)
781 except AttributeError:
781 except AttributeError:
782 return False
782 return False
783
783
784 def __ne__(self, other):
784 def __ne__(self, other):
785 return not (self == other)
785 return not (self == other)
786
786
    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        # changelog revision associated with this filectx (may be lazily
        # corrected for linkrev aliasing, see _changeid)
        return self._changeid
    def linkrev(self):
        # raw filelog linkrev; may point at a "shadowing" changeset, see
        # introrev() for the corrected value
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def obsolete(self):
        return self._changectx.obsolete()
    def instabilities(self):
        return self._changectx.instabilities()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())
834
836
    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            # unreadable data is treated as non-binary rather than an error
            return False
    def isexec(self):
        # 'x' flag in the manifest marks the file executable
        return 'x' in self.flags()
    def islink(self):
        # 'l' flag in the manifest marks the file as a symlink
        return 'l' in self.flags()
847
849
    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts.
        The base implementation always returns False; subclasses that
        represent an absent file are expected to override this to return
        True."""
        return False
854
856
    # set to True by contexts implementing their own comparison; cmp()
    # below defers to the other side when it is custom
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        # only fall through to an actual content comparison when the sizes
        # could plausibly match; otherwise the files must differ
        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True
873
875
    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr
919
921
    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            # no associated changeset, or the linkrev already matches it:
            # the raw linkrev is trustworthy
            return self.linkrev()
        return self._adjustlinkrev(self.rev(), inclusive=True)
935
937
    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx
951
953
    def parents(self):
        """Return parent filectxs, substituting the rename source for a
        null parent when this file revision records a copy."""
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
971
973
972 def p1(self):
974 def p1(self):
973 return self.parents()[0]
975 return self.parents()[0]
974
976
975 def p2(self):
977 def p2(self):
976 p = self.parents()
978 p = self.parents()
977 if len(p) == 2:
979 if len(p) == 2:
978 return p[1]
980 return p[1]
979 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
981 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
980
982
981 def annotate(self, follow=False, linenumber=False, skiprevs=None,
983 def annotate(self, follow=False, linenumber=False, skiprevs=None,
982 diffopts=None):
984 diffopts=None):
983 '''returns a list of tuples of ((ctx, number), line) for each line
985 '''returns a list of tuples of ((ctx, number), line) for each line
984 in the file, where ctx is the filectx of the node where
986 in the file, where ctx is the filectx of the node where
985 that line was last changed; if linenumber parameter is true, number is
987 that line was last changed; if linenumber parameter is true, number is
986 the line number at the first appearance in the managed file, otherwise,
988 the line number at the first appearance in the managed file, otherwise,
987 number has a fixed value of False.
989 number has a fixed value of False.
988 '''
990 '''
989
991
990 def lines(text):
992 def lines(text):
991 if text.endswith("\n"):
993 if text.endswith("\n"):
992 return text.count("\n")
994 return text.count("\n")
993 return text.count("\n") + int(bool(text))
995 return text.count("\n") + int(bool(text))
994
996
995 if linenumber:
997 if linenumber:
996 def decorate(text, rev):
998 def decorate(text, rev):
997 return ([annotateline(fctx=rev, lineno=i)
999 return ([annotateline(fctx=rev, lineno=i)
998 for i in xrange(1, lines(text) + 1)], text)
1000 for i in xrange(1, lines(text) + 1)], text)
999 else:
1001 else:
1000 def decorate(text, rev):
1002 def decorate(text, rev):
1001 return ([annotateline(fctx=rev)] * lines(text), text)
1003 return ([annotateline(fctx=rev)] * lines(text), text)
1002
1004
1003 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1005 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
1004
1006
1005 def parents(f):
1007 def parents(f):
1006 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1008 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
1007 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1009 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1008 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1010 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1009 # isn't an ancestor of the srcrev.
1011 # isn't an ancestor of the srcrev.
1010 f._changeid
1012 f._changeid
1011 pl = f.parents()
1013 pl = f.parents()
1012
1014
1013 # Don't return renamed parents if we aren't following.
1015 # Don't return renamed parents if we aren't following.
1014 if not follow:
1016 if not follow:
1015 pl = [p for p in pl if p.path() == f.path()]
1017 pl = [p for p in pl if p.path() == f.path()]
1016
1018
1017 # renamed filectx won't have a filelog yet, so set it
1019 # renamed filectx won't have a filelog yet, so set it
1018 # from the cache to save time
1020 # from the cache to save time
1019 for p in pl:
1021 for p in pl:
1020 if not '_filelog' in p.__dict__:
1022 if not '_filelog' in p.__dict__:
1021 p._filelog = getlog(p.path())
1023 p._filelog = getlog(p.path())
1022
1024
1023 return pl
1025 return pl
1024
1026
1025 # use linkrev to find the first changeset where self appeared
1027 # use linkrev to find the first changeset where self appeared
1026 base = self
1028 base = self
1027 introrev = self.introrev()
1029 introrev = self.introrev()
1028 if self.rev() != introrev:
1030 if self.rev() != introrev:
1029 base = self.filectx(self.filenode(), changeid=introrev)
1031 base = self.filectx(self.filenode(), changeid=introrev)
1030 if getattr(base, '_ancestrycontext', None) is None:
1032 if getattr(base, '_ancestrycontext', None) is None:
1031 cl = self._repo.changelog
1033 cl = self._repo.changelog
1032 if introrev is None:
1034 if introrev is None:
1033 # wctx is not inclusive, but works because _ancestrycontext
1035 # wctx is not inclusive, but works because _ancestrycontext
1034 # is used to test filelog revisions
1036 # is used to test filelog revisions
1035 ac = cl.ancestors([p.rev() for p in base.parents()],
1037 ac = cl.ancestors([p.rev() for p in base.parents()],
1036 inclusive=True)
1038 inclusive=True)
1037 else:
1039 else:
1038 ac = cl.ancestors([introrev], inclusive=True)
1040 ac = cl.ancestors([introrev], inclusive=True)
1039 base._ancestrycontext = ac
1041 base._ancestrycontext = ac
1040
1042
1041 # This algorithm would prefer to be recursive, but Python is a
1043 # This algorithm would prefer to be recursive, but Python is a
1042 # bit recursion-hostile. Instead we do an iterative
1044 # bit recursion-hostile. Instead we do an iterative
1043 # depth-first search.
1045 # depth-first search.
1044
1046
1045 # 1st DFS pre-calculates pcache and needed
1047 # 1st DFS pre-calculates pcache and needed
1046 visit = [base]
1048 visit = [base]
1047 pcache = {}
1049 pcache = {}
1048 needed = {base: 1}
1050 needed = {base: 1}
1049 while visit:
1051 while visit:
1050 f = visit.pop()
1052 f = visit.pop()
1051 if f in pcache:
1053 if f in pcache:
1052 continue
1054 continue
1053 pl = parents(f)
1055 pl = parents(f)
1054 pcache[f] = pl
1056 pcache[f] = pl
1055 for p in pl:
1057 for p in pl:
1056 needed[p] = needed.get(p, 0) + 1
1058 needed[p] = needed.get(p, 0) + 1
1057 if p not in pcache:
1059 if p not in pcache:
1058 visit.append(p)
1060 visit.append(p)
1059
1061
1060 # 2nd DFS does the actual annotate
1062 # 2nd DFS does the actual annotate
1061 visit[:] = [base]
1063 visit[:] = [base]
1062 hist = {}
1064 hist = {}
1063 while visit:
1065 while visit:
1064 f = visit[-1]
1066 f = visit[-1]
1065 if f in hist:
1067 if f in hist:
1066 visit.pop()
1068 visit.pop()
1067 continue
1069 continue
1068
1070
1069 ready = True
1071 ready = True
1070 pl = pcache[f]
1072 pl = pcache[f]
1071 for p in pl:
1073 for p in pl:
1072 if p not in hist:
1074 if p not in hist:
1073 ready = False
1075 ready = False
1074 visit.append(p)
1076 visit.append(p)
1075 if ready:
1077 if ready:
1076 visit.pop()
1078 visit.pop()
1077 curr = decorate(f.data(), f)
1079 curr = decorate(f.data(), f)
1078 skipchild = False
1080 skipchild = False
1079 if skiprevs is not None:
1081 if skiprevs is not None:
1080 skipchild = f._changeid in skiprevs
1082 skipchild = f._changeid in skiprevs
1081 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1083 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1082 diffopts)
1084 diffopts)
1083 for p in pl:
1085 for p in pl:
1084 if needed[p] == 1:
1086 if needed[p] == 1:
1085 del hist[p]
1087 del hist[p]
1086 del needed[p]
1088 del needed[p]
1087 else:
1089 else:
1088 needed[p] -= 1
1090 needed[p] -= 1
1089
1091
1090 hist[f] = curr
1092 hist[f] = curr
1091 del pcache[f]
1093 del pcache[f]
1092
1094
1093 return zip(hist[base][0], hist[base][1].splitlines(True))
1095 return zip(hist[base][0], hist[base][1].splitlines(True))
1094
1096
1095 def ancestors(self, followfirst=False):
1097 def ancestors(self, followfirst=False):
1096 visit = {}
1098 visit = {}
1097 c = self
1099 c = self
1098 if followfirst:
1100 if followfirst:
1099 cut = 1
1101 cut = 1
1100 else:
1102 else:
1101 cut = None
1103 cut = None
1102
1104
1103 while True:
1105 while True:
1104 for parent in c.parents()[:cut]:
1106 for parent in c.parents()[:cut]:
1105 visit[(parent.linkrev(), parent.filenode())] = parent
1107 visit[(parent.linkrev(), parent.filenode())] = parent
1106 if not visit:
1108 if not visit:
1107 break
1109 break
1108 c = visit.pop(max(visit))
1110 c = visit.pop(max(visit))
1109 yield c
1111 yield c
1110
1112
1111 def decodeddata(self):
1113 def decodeddata(self):
1112 """Returns `data()` after running repository decoding filters.
1114 """Returns `data()` after running repository decoding filters.
1113
1115
1114 This is often equivalent to how the data would be expressed on disk.
1116 This is often equivalent to how the data would be expressed on disk.
1115 """
1117 """
1116 return self._repo.wwritedata(self.path(), self.data())
1118 return self._repo.wwritedata(self.path(), self.data())
1117
1119
@attr.s(slots=True, frozen=True)
class annotateline(object):
    """Immutable record describing the origin of one annotated line."""
    # file context associated with this line (compared against the child
    # fctx in _annotatepair to decide whether a line is still unattributed)
    fctx = attr.ib()
    # line number within fctx; defaults to False when line numbers were not
    # computed -- NOTE(review): presumably set by the annotate decorator,
    # confirm against callers
    lineno = attr.ib(default=False)
    # Whether this annotation was the result of a skip-annotate.
    skip = attr.ib(default=False)
1124
1126
def _annotatepair(parents, childfctx, child, skipchild, diffopts):
    r'''
    Given parent and child fctxes and annotate data for parents, for all lines
    in either parent that match the child, annotate the child with the parent's
    data.

    Additionally, if `skipchild` is True, replace all other lines with parent
    annotate data as well such that child is never blamed for any lines.

    See test-annotate.py for unit tests.

    Each of `child` and the entries of `parents` is a pair
    ``(lines, text)`` where ``lines`` is a list of annotateline objects
    (indexed/sliced as ``[0]``) and ``text`` is the file content diffed
    via mdiff (accessed as ``[1]``).  `child` is mutated in place and
    also returned.
    '''
    # diff each parent's text against the child's text once up front
    pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
               for parent in parents]

    if skipchild:
        # Need to iterate over the blocks twice -- make it a list
        pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
    # Mercurial currently prefers p2 over p1 for annotate.
    # TODO: change this?
    for parent, blocks in pblocks:
        for (a1, a2, b1, b2), t in blocks:
            # Changed blocks ('!') or blocks made only of blank lines ('~')
            # belong to the child.
            if t == '=':
                child[0][b1:b2] = parent[0][a1:a2]

    if skipchild:
        # Now try and match up anything that couldn't be matched,
        # Reversing pblocks maintains bias towards p2, matching above
        # behavior.
        pblocks.reverse()

        # The heuristics are:
        # * Work on blocks of changed lines (effectively diff hunks with -U0).
        #   This could potentially be smarter but works well enough.
        # * For a non-matching section, do a best-effort fit. Match lines in
        #   diff hunks 1:1, dropping lines as necessary.
        # * Repeat the last line as a last resort.

        # First, replace as much as possible without repeating the last line.
        remaining = [(parent, []) for parent, _blocks in pblocks]
        for idx, (parent, blocks) in enumerate(pblocks):
            for (a1, a2, b1, b2), _t in blocks:
                if a2 - a1 >= b2 - b1:
                    # parent hunk is at least as long: map child lines 1:1
                    # (xrange: this codebase still targets Python 2)
                    for bk in xrange(b1, b2):
                        if child[0][bk].fctx == childfctx:
                            ak = min(a1 + (bk - b1), a2 - 1)
                            child[0][bk] = attr.evolve(parent[0][ak], skip=True)
                else:
                    # parent hunk is shorter: defer to the second pass
                    remaining[idx][1].append((a1, a2, b1, b2))

        # Then, look at anything left, which might involve repeating the last
        # line.
        for parent, blocks in remaining:
            for a1, a2, b1, b2 in blocks:
                for bk in xrange(b1, b2):
                    if child[0][bk].fctx == childfctx:
                        # min() clamps to the parent's last line, repeating it
                        ak = min(a1 + (bk - b1), a2 - 1)
                        child[0][bk] = attr.evolve(parent[0][ak], skip=True)
    return child
1185
1187
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node.

        At least one of changeid, fileid or changectx must be supplied;
        the others are derived lazily via propertycaches on demand.
        """
        self._repo = repo
        self._path = path

        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # only seed the attributes we were given; missing ones are
        # computed lazily by the propertycaches on basefilectx/this class
        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        # raw=True bypasses flag processing -- NOTE(review): per the
        # revlog.revision contract; confirm against mercurial/revlog.py
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        """Return the file content, honoring censorship policy.

        Raises error.Abort for censored nodes unless censor.policy is
        set to "ignore", in which case an empty string is returned.
        """
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        # size as recorded by the filelog, not the working directory
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        # if either changeset parent already has this exact file revision,
        # the rename happened earlier -- don't report a copy here
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                # parent doesn't have the file at all; keep looking
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1291
1293
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        # an uncommitted context has no revision or node yet
        self._rev = None
        self._node = None
        self._text = text
        # only seed what was given; the rest comes from propertycaches
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
            if self._extra['branch'] == '':
                self._extra['branch'] = 'default'

    def __bytes__(self):
        # renders as "<first parent>+", marking an uncommitted state
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        # a committable context always exists (Python 2 truthiness hook)
        return True

    __bool__ = __nonzero__

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # lazily computed full repository status (unless 'changes' was
        # passed to __init__, which pre-seeds this attribute)
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        # devel.default-date lets tests pin the commit date
        ui = self._repo.ui
        date = ui.configdate('devel', 'default-date')
        if date is None:
            date = util.makedate()
        return date

    def subrev(self, subpath):
        # uncommitted contexts have no recorded subrepo revision
        return None

    def manifestnode(self):
        # no manifest node exists until the context is committed
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        # all files touched by this context, sorted
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        # an uncommitted context can carry no tags
        return []

    def bookmarks(self):
        """Return the union of the parents' bookmarks."""
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        """Return the highest (most private) phase among the parents."""
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        # nothing can descend from an uncommitted context
        return []

    def flags(self, path):
        """Return the flags ('l', 'x' or '') recorded for path.

        Prefers the cached manifest when one has already been built;
        otherwise consults the dirstate-backed flag function.
        """
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            # e.g. file vanished from the working directory
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # parents first, then the rest of the changelog ancestry
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        # base implementation: subclasses (e.g. workingctx) override
        return False
1492
1494
1493 class workingctx(committablectx):
1495 class workingctx(committablectx):
1494 """A workingctx object makes access to data related to
1496 """A workingctx object makes access to data related to
1495 the current working directory convenient.
1497 the current working directory convenient.
1496 date - any valid date string or (unixtime, offset), or None.
1498 date - any valid date string or (unixtime, offset), or None.
1497 user - username string, or None.
1499 user - username string, or None.
1498 extra - a dictionary of extra values, or None.
1500 extra - a dictionary of extra values, or None.
1499 changes - a list of file lists as returned by localrepo.status()
1501 changes - a list of file lists as returned by localrepo.status()
1500 or None to use the repository status.
1502 or None to use the repository status.
1501 """
1503 """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        # all state handling is inherited from committablectx
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1505
1507
1506 def __iter__(self):
1508 def __iter__(self):
1507 d = self._repo.dirstate
1509 d = self._repo.dirstate
1508 for f in d:
1510 for f in d:
1509 if d[f] != 'r':
1511 if d[f] != 'r':
1510 yield f
1512 yield f
1511
1513
1512 def __contains__(self, key):
1514 def __contains__(self, key):
1513 return self._repo.dirstate[key] not in "?r"
1515 return self._repo.dirstate[key] not in "?r"
1514
1516
    def hex(self):
        # the working directory has no committed node; report the
        # working-directory pseudo-identifier (wdirid) instead
        return hex(wdirid)
1517
1519
    @propertycache
    def _parents(self):
        """Parent changectxs of the working directory.

        The dirstate always reports two parent nodes; a nullid second
        parent means there is none, so it is dropped here.
        """
        p = self._repo.dirstate.parents()
        if p[1] == nullid:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]
1524
1526
    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        # workingfilectx reads from the filesystem rather than a filelog
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)
1529
1531
    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        # NOTE: may return a truthy non-bool (a context or a file list);
        # callers should rely on truthiness only
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))
1541
1543
    def add(self, list, prefix=""):
        """Schedule the given paths for addition to the dirstate.

        Returns the subset of paths that were rejected (missing on disk,
        or not a regular file or symlink).  NOTE: the parameter name
        shadows the builtin 'list'; kept for interface compatibility.
        """
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension. That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                # warn about very large files but still add them
                if st.st_size > 10000000:
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    # already added, merged or normal: nothing to do
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    # previously removed: resurrect instead of re-adding
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected
1576
1578
def forget(self, files, prefix=""):
    """Stop tracking the given files without deleting them.

    Returns the subset of ``files`` that was not tracked at all.
    """
    with self._repo.wlock():
        ds = self._repo.dirstate
        uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
        rejected = []
        for f in files:
            if f not in self._repo.dirstate:
                self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
                rejected.append(f)
            elif self._repo.dirstate[f] != 'a':
                # tracked in a parent: mark as removed
                self._repo.dirstate.remove(f)
            else:
                # only scheduled for addition: just drop the entry
                self._repo.dirstate.drop(f)
        return rejected
1591
1593
def undelete(self, list):
    """Restore removed files from a parent revision to disk and mark
    them normal in the dirstate. ``list`` shadows the builtin but is
    kept for API compatibility."""
    pctxs = self.parents()
    with self._repo.wlock():
        ds = self._repo.dirstate
        for f in list:
            if self._repo.dirstate[f] != 'r':
                self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
            else:
                # old-style conditional: take the file from p1 if it
                # exists there, otherwise from p2
                fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
                t = fctx.data()
                self._repo.wwrite(f, t, fctx.flags())
                self._repo.dirstate.normal(f)
1604
1606
def copy(self, source, dest):
    """Record that ``dest`` is a copy of ``source`` in the dirstate.

    ``dest`` must already exist on disk as a regular file or symlink;
    otherwise a warning is emitted and nothing is recorded.
    """
    try:
        st = self._repo.wvfs.lstat(dest)
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise
        self._repo.ui.warn(_("%s does not exist!\n")
                           % self._repo.dirstate.pathto(dest))
        return
    if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
        self._repo.ui.warn(_("copy failed: %s is not a file or a "
                             "symbolic link\n")
                           % self._repo.dirstate.pathto(dest))
    else:
        with self._repo.wlock():
            # make sure dest is tracked before recording the copy
            if self._repo.dirstate[dest] in '?':
                self._repo.dirstate.add(dest)
            elif self._repo.dirstate[dest] in 'r':
                self._repo.dirstate.normallookup(dest)
            self._repo.dirstate.copy(source, dest)
1625
1627
def match(self, pats=None, include=None, exclude=None, default='glob',
          listsubrepos=False, badfn=None):
    """Build a matcher for the given patterns against this context."""
    repo = self._repo

    # Only a case insensitive filesystem needs magic to translate user
    # input to the actual case stored on disk.
    insensitive = not util.fscasesensitive(repo.root)
    return matchmod.match(
        repo.root, repo.getcwd(), pats, include, exclude, default,
        auditor=repo.auditor, ctx=self, listsubrepos=listsubrepos,
        badfn=badfn, icasefs=insensitive)
1637
1639
def flushall(self):
    """No-op; present so workingctx matches overlayworkingfilectx's API."""
    return None
1640
1642
def _filtersuspectsymlink(self, files):
    """Drop files flagged 'l' whose on-disk content cannot be a symlink
    placeholder. Returns ``files`` unchanged when the filesystem
    supports symlinks natively."""
    if not files or self._repo.dirstate._checklink:
        return files

    # Symlink placeholders may get non-symlink-like contents
    # via user error or dereferencing by NFS or Samba servers,
    # so we filter out any placeholders that don't look like a
    # symlink
    sane = []
    for f in files:
        if self.flags(f) == 'l':
            d = self[f].data()
            # empty, over-long, multi-line or binary content cannot be
            # a symlink target
            if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
                self._repo.ui.debug('ignoring suspect symlink placeholder'
                                    ' "%s"\n' % f)
                continue
        sane.append(f)
    return sane
1659
1661
def _checklookup(self, files):
    """Do a full content compare on possibly-clean ``files``.

    Returns (modified, deleted, fixup) where fixup is the list of files
    that turned out to be clean and whose dirstate entry can be
    refreshed.
    """
    # check for any possibly clean files
    if not files:
        return [], [], []

    modified = []
    deleted = []
    fixup = []
    pctx = self._parents[0]
    # do a full compare of any files that might have changed
    for f in sorted(files):
        try:
            # This will return True for a file that got replaced by a
            # directory in the interim, but fixing that is pretty hard.
            if (f not in pctx or self.flags(f) != pctx.flags(f)
                or pctx[f].cmp(self[f])):
                modified.append(f)
            else:
                fixup.append(f)
        except (IOError, OSError):
            # A file become inaccessible in between? Mark it as deleted,
            # matching dirstate behavior (issue5584).
            # The dirstate has more complex behavior around whether a
            # missing file matches a directory, etc, but we don't need to
            # bother with that: if f has made it to this point, we're sure
            # it's in the dirstate.
            deleted.append(f)

    return modified, deleted, fixup
1689
1691
def _poststatusfixup(self, status, fixup):
    """update dirstate for files that are actually clean

    Also runs any registered post-dirstate-status hooks. Best-effort:
    if the wlock cannot be taken or the dirstate changed underneath us,
    the update is skipped rather than corrupting state.
    """
    poststatus = self._repo.postdsstatus()
    if fixup or poststatus:
        try:
            # remember the dirstate's identity so we can detect a
            # concurrent rewrite after we take the lock
            oldid = self._repo.dirstate.identity()

            # updating the dirstate is optional
            # so we don't wait on the lock
            # wlock can invalidate the dirstate, so cache normal _after_
            # taking the lock
            with self._repo.wlock(False):
                if self._repo.dirstate.identity() == oldid:
                    if fixup:
                        normal = self._repo.dirstate.normal
                        for f in fixup:
                            normal(f)
                        # write changes out explicitly, because nesting
                        # wlock at runtime may prevent 'wlock.release()'
                        # after this block from doing so for subsequent
                        # changing files
                        tr = self._repo.currenttransaction()
                        self._repo.dirstate.write(tr)

                    if poststatus:
                        for ps in poststatus:
                            ps(self, status)
                else:
                    # in this case, writing changes out breaks
                    # consistency, because .hg/dirstate was
                    # already changed simultaneously after last
                    # caching (see also issue5584 for detail)
                    self._repo.ui.debug('skip updating dirstate: '
                                        'identity mismatch\n')
        except error.LockError:
            pass
        finally:
            # Even if the wlock couldn't be grabbed, clear out the list.
            self._repo.clearpostdsstatus()
1729
1731
def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
    '''Gets the status from the dirstate -- internal use only.'''
    subrepos = []
    if '.hgsub' in self:
        subrepos = sorted(self.substate)
    # cmp: files whose cleanliness must be verified by content compare
    cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
                                        clean=clean, unknown=unknown)

    # check for any possibly clean files
    fixup = []
    if cmp:
        modified2, deleted2, fixup = self._checklookup(cmp)
        s.modified.extend(modified2)
        s.deleted.extend(deleted2)

    if fixup and clean:
        s.clean.extend(fixup)

    # refresh dirstate entries for verified-clean files, run hooks
    self._poststatusfixup(s, fixup)

    if match.always():
        # cache for performance
        if s.unknown or s.ignored or s.clean:
            # "_status" is cached with list*=False in the normal route
            self._status = scmutil.status(s.modified, s.added, s.removed,
                                          s.deleted, [], [], [])
        else:
            self._status = s

    return s
1760
1762
@propertycache
def _manifest(self):
    """generate a manifest corresponding to the values in self._status

    This reuses the file nodeids from the parent, but we use special
    node identifiers for added and modified files. This is used by
    manifest merge to see that files are different and by update logic
    to avoid deleting newly added files.
    """
    return self._buildstatusmanifest(self._status)
1771
1773
def _buildstatusmanifest(self, status):
    """Builds a manifest that includes the given status results."""
    parents = self.parents()

    # start from p1's manifest and overlay the status results
    man = parents[0].manifest().copy()

    ff = self._flagfunc
    # added/modified files get sentinel nodeids so merge/update can
    # tell them apart from unchanged entries
    for i, l in ((addednodeid, status.added),
                 (modifiednodeid, status.modified)):
        for f in l:
            man[f] = i
            try:
                man.setflag(f, ff(f))
            except OSError:
                # file vanished while we were looking; keep going
                pass

    for f in status.deleted + status.removed:
        if f in man:
            del man[f]

    return man
1793
1795
def _buildstatus(self, other, s, match, listignored, listclean,
                 listunknown):
    """build a status with respect to another context

    This includes logic for maintaining the fast path of status when
    comparing the working directory against its parent, which is to skip
    building a new manifest if self (working directory) is not comparing
    against its parent (repo['.']).
    """
    # the incoming ``s`` is intentionally discarded: the working-dir
    # status must come from the dirstate
    s = self._dirstatestatus(match, listignored, listclean, listunknown)
    # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
    # might have accidentally ended up with the entire contents of the file
    # they are supposed to be linking to.
    s.modified[:] = self._filtersuspectsymlink(s.modified)
    if other != self._repo['.']:
        # comparing against something other than p1: fall back to the
        # generic manifest-based status
        s = super(workingctx, self)._buildstatus(other, s, match,
                                                 listignored, listclean,
                                                 listunknown)
    return s
1813
1815
def _matchstatus(self, other, match):
    """override the match method with a filter for directory patterns

    We use inheritance to customize the match.bad method only in cases of
    workingctx since it belongs only to the working directory when
    comparing against the parent changeset.

    If we aren't comparing against the working directory's parent, then we
    just use the default match object sent to us.
    """
    if other != self._repo['.']:
        def bad(f, msg):
            # 'f' may be a directory pattern from 'match.files()',
            # so 'f not in ctx1' is not enough
            if f not in other and not other.hasdir(f):
                self._repo.ui.warn('%s: %s\n' %
                                   (self._repo.dirstate.pathto(f), msg))
        match.bad = bad
    return match
1833
1835
def markcommitted(self, node):
    """Perform post-commit cleanup for the working context."""
    super(workingctx, self).markcommitted(node)

    # let sparse checkouts refresh their rules after the commit
    sparse.aftercommit(self._repo, node)
1838
1840
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        self._repo = repo
        self._path = path
        # not yet committed: no changeset id / file revision yet
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context always "exists"
        return True

    __bool__ = __nonzero__

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            # nullid when the file does not exist in that manifest
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # (sourcepath, sourcenode) plus a None filelog placeholder
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        # skip parents where the file is absent (nullid)
        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        # an uncommitted file has no descendants
        return []
1885
1887
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        return workingctx(self._repo)

    def data(self):
        # read the file from the working directory (with filters applied)
        return self._repo.wread(self._path)
    def renamed(self):
        """Return (source, sourcenode) if this file was recorded as a
        copy in the dirstate, else None."""
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        """Return (mtime, tzoffset); fall back to the changectx date if
        the file is gone from disk."""
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def exists(self):
        return self._repo.wvfs.exists(self._path)

    def lexists(self):
        return self._repo.wvfs.lexists(self._path)

    def audit(self):
        return self._repo.wvfs.audit(self._path)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)

    def write(self, data, flags, backgroundclose=False):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags,
                          backgroundclose=backgroundclose)

    def markcopied(self, src):
        """marks this file a copy of `src`"""
        # only record the copy if the destination is tracked
        if self._repo.dirstate[self._path] in "nma":
            self._repo.dirstate.copy(src, self._path)

    def clearunknown(self):
        """Removes conflicting items in the working directory so that
        ``write()`` can be called successfully.
        """
        wvfs = self._repo.wvfs
        f = self._path
        wvfs.audit(f)
        # a directory standing where the file should go must be removed
        if wvfs.isdir(f) and not wvfs.islink(f):
            wvfs.rmtree(f, forcibly=True)
        # a file standing where an ancestor directory should go must be
        # removed too; only the deepest such conflict can exist
        for p in reversed(list(util.finddirs(f))):
            if wvfs.isfileorlink(p):
                wvfs.unlink(p)
                break

    def setflags(self, l, x):
        self._repo.wvfs.setflags(self._path, l, x)
1963
1965
class overlayworkingctx(workingctx):
    """Wraps another mutable context with a write-back cache that can be
    flushed at a later time.

    self._cache[path] maps to a dict with keys: {
      'exists': bool?
      'date': date?
      'data': str?
      'flags': str?
    }
    If `exists` is True, `flags` must be non-None and 'date' is non-None.
    If it is `False`, the file was deleted.
    """
1977
1979
def __init__(self, repo, wrappedctx):
    super(overlayworkingctx, self).__init__(repo)
    self._repo = repo
    # the mutable context whose data we shadow with the in-memory cache
    self._wrappedctx = wrappedctx
    # resets the write-back cache; defined outside this view --
    # presumably initializes self._cache and self._writeorder
    self._clean()
1983
1985
def data(self, path):
    """Return the content of ``path``, preferring dirty cached data.

    Falls back to the wrapped context when the path is not dirty, or
    when only flags were cached. Raises ProgrammingError if the file
    was deleted in the overlay.
    """
    if self.isdirty(path):
        if self._cache[path]['exists']:
            if self._cache[path]['data']:
                return self._cache[path]['data']
            else:
                # Must fallback here, too, because we only set flags.
                return self._wrappedctx[path].data()
        else:
            # Fix: use the ``path`` argument -- this class has no
            # ``_path`` attribute, so ``self._path`` would raise
            # AttributeError instead of the intended error.
            raise error.ProgrammingError("No such file or directory: %s" %
                                         path)
    else:
        return self._wrappedctx[path].data()
1997
1999
def isinmemory(self):
    """This context's changes live only in memory until flushed."""
    return True
2000
2002
def filedate(self, path):
    """Return the date recorded for ``path``, cached value first."""
    if self.isdirty(path):
        return self._cache[path]['date']
    return self._wrappedctx[path].date()
2006
2008
def flags(self, path):
    """Return the flags string ('l', 'x', 'lx' or '') for ``path``."""
    if self.isdirty(path):
        if self._cache[path]['exists']:
            return self._cache[path]['flags']
        else:
            # Fix: use the ``path`` argument -- ``self._path`` does not
            # exist on this class and would raise AttributeError here.
            raise error.ProgrammingError("No such file or directory: %s" %
                                         path)
    else:
        return self._wrappedctx[path].flags()
2016
2018
def write(self, path, data, flags=''):
    """Store ``data`` for ``path`` in the overlay cache.

    ``flags`` is the usual 'l'/'x' flag string. Deletion is expressed
    via remove(), never via ``data=None``.
    """
    if data is None:
        raise error.ProgrammingError("data must be non-None")
    self._markdirty(path, exists=True, data=data, date=util.makedate(),
                    flags=flags)
2022
2024
def setflags(self, path, l, x):
    """Record symlink ('l') / executable ('x') flags for ``path``."""
    newflags = ('l' if l else '') + ('x' if x else '')
    self._markdirty(path, exists=True, date=util.makedate(),
                    flags=newflags)
2026
2028
def remove(self, path):
    # mark the path deleted in the overlay; the wrapped context's data
    # is untouched until flush
    self._markdirty(path, exists=False)
2029
2031
def exists(self, path):
    """exists behaves like `lexists`, but needs to follow symlinks and
    return False if they are broken.
    """
    if self.isdirty(path):
        # If this path exists and is a symlink, "follow" it by calling
        # exists on the destination path.
        # NOTE: recursion -- a chain of cached symlinks is followed
        # link by link; the cached 'data' holds the link target.
        if (self._cache[path]['exists'] and
            'l' in self._cache[path]['flags']):
            return self.exists(self._cache[path]['data'].strip())
        else:
            return self._cache[path]['exists']
    return self._wrappedctx[path].exists()
2043
2045
2044 def lexists(self, path):
2046 def lexists(self, path):
2045 """lexists returns True if the path exists"""
2047 """lexists returns True if the path exists"""
2046 if self.isdirty(path):
2048 if self.isdirty(path):
2047 return self._cache[path]['exists']
2049 return self._cache[path]['exists']
2048 return self._wrappedctx[path].lexists()
2050 return self._wrappedctx[path].lexists()
2049
2051
2050 def size(self, path):
2052 def size(self, path):
2051 if self.isdirty(path):
2053 if self.isdirty(path):
2052 if self._cache[path]['exists']:
2054 if self._cache[path]['exists']:
2053 return len(self._cache[path]['data'])
2055 return len(self._cache[path]['data'])
2054 else:
2056 else:
2055 raise error.ProgrammingError("No such file or directory: %s" %
2057 raise error.ProgrammingError("No such file or directory: %s" %
2056 self._path)
2058 self._path)
2057 return self._wrappedctx[path].size()
2059 return self._wrappedctx[path].size()
2058
2060
2059 def flushall(self):
2061 def flushall(self):
2060 for path in self._writeorder:
2062 for path in self._writeorder:
2061 entry = self._cache[path]
2063 entry = self._cache[path]
2062 if entry['exists']:
2064 if entry['exists']:
2063 self._wrappedctx[path].clearunknown()
2065 self._wrappedctx[path].clearunknown()
2064 if entry['data'] is not None:
2066 if entry['data'] is not None:
2065 if entry['flags'] is None:
2067 if entry['flags'] is None:
2066 raise error.ProgrammingError('data set but not flags')
2068 raise error.ProgrammingError('data set but not flags')
2067 self._wrappedctx[path].write(
2069 self._wrappedctx[path].write(
2068 entry['data'],
2070 entry['data'],
2069 entry['flags'])
2071 entry['flags'])
2070 else:
2072 else:
2071 self._wrappedctx[path].setflags(
2073 self._wrappedctx[path].setflags(
2072 'l' in entry['flags'],
2074 'l' in entry['flags'],
2073 'x' in entry['flags'])
2075 'x' in entry['flags'])
2074 else:
2076 else:
2075 self._wrappedctx[path].remove(path)
2077 self._wrappedctx[path].remove(path)
2076 self._clean()
2078 self._clean()
2077
2079
2078 def isdirty(self, path):
2080 def isdirty(self, path):
2079 return path in self._cache
2081 return path in self._cache
2080
2082
2081 def _clean(self):
2083 def _clean(self):
2082 self._cache = {}
2084 self._cache = {}
2083 self._writeorder = []
2085 self._writeorder = []
2084
2086
2085 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2087 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2086 if path not in self._cache:
2088 if path not in self._cache:
2087 self._writeorder.append(path)
2089 self._writeorder.append(path)
2088
2090
2089 self._cache[path] = {
2091 self._cache[path] = {
2090 'exists': exists,
2092 'exists': exists,
2091 'data': data,
2093 'data': data,
2092 'date': date,
2094 'date': date,
2093 'flags': flags,
2095 'flags': flags,
2094 }
2096 }
2095
2097
2096 def filectx(self, path, filelog=None):
2098 def filectx(self, path, filelog=None):
2097 return overlayworkingfilectx(self._repo, path, parent=self,
2099 return overlayworkingfilectx(self._repo, path, parent=self,
2098 filelog=filelog)
2100 filelog=filelog)
2099
2101
2100 class overlayworkingfilectx(workingfilectx):
2102 class overlayworkingfilectx(workingfilectx):
2101 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2103 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2102 cache, which can be flushed through later by calling ``flush()``."""
2104 cache, which can be flushed through later by calling ``flush()``."""
2103
2105
2104 def __init__(self, repo, path, filelog=None, parent=None):
2106 def __init__(self, repo, path, filelog=None, parent=None):
2105 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2107 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2106 parent)
2108 parent)
2107 self._repo = repo
2109 self._repo = repo
2108 self._parent = parent
2110 self._parent = parent
2109 self._path = path
2111 self._path = path
2110
2112
2111 def cmp(self, fctx):
2113 def cmp(self, fctx):
2112 return self.data() != fctx.data()
2114 return self.data() != fctx.data()
2113
2115
2114 def ctx(self):
2116 def ctx(self):
2115 return self._parent
2117 return self._parent
2116
2118
2117 def data(self):
2119 def data(self):
2118 return self._parent.data(self._path)
2120 return self._parent.data(self._path)
2119
2121
2120 def date(self):
2122 def date(self):
2121 return self._parent.filedate(self._path)
2123 return self._parent.filedate(self._path)
2122
2124
2123 def exists(self):
2125 def exists(self):
2124 return self.lexists()
2126 return self.lexists()
2125
2127
2126 def lexists(self):
2128 def lexists(self):
2127 return self._parent.exists(self._path)
2129 return self._parent.exists(self._path)
2128
2130
2129 def renamed(self):
2131 def renamed(self):
2130 # Copies are currently tracked in the dirstate as before. Straight copy
2132 # Copies are currently tracked in the dirstate as before. Straight copy
2131 # from workingfilectx.
2133 # from workingfilectx.
2132 rp = self._repo.dirstate.copied(self._path)
2134 rp = self._repo.dirstate.copied(self._path)
2133 if not rp:
2135 if not rp:
2134 return None
2136 return None
2135 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
2137 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
2136
2138
2137 def size(self):
2139 def size(self):
2138 return self._parent.size(self._path)
2140 return self._parent.size(self._path)
2139
2141
2140 def audit(self):
2142 def audit(self):
2141 pass
2143 pass
2142
2144
2143 def flags(self):
2145 def flags(self):
2144 return self._parent.flags(self._path)
2146 return self._parent.flags(self._path)
2145
2147
2146 def setflags(self, islink, isexec):
2148 def setflags(self, islink, isexec):
2147 return self._parent.setflags(self._path, islink, isexec)
2149 return self._parent.setflags(self._path, islink, isexec)
2148
2150
2149 def write(self, data, flags, backgroundclose=False):
2151 def write(self, data, flags, backgroundclose=False):
2150 return self._parent.write(self._path, data, flags)
2152 return self._parent.write(self._path, data, flags)
2151
2153
2152 def remove(self, ignoremissing=False):
2154 def remove(self, ignoremissing=False):
2153 return self._parent.remove(self._path)
2155 return self._parent.remove(self._path)
2154
2156
2155 class workingcommitctx(workingctx):
2157 class workingcommitctx(workingctx):
2156 """A workingcommitctx object makes access to data related to
2158 """A workingcommitctx object makes access to data related to
2157 the revision being committed convenient.
2159 the revision being committed convenient.
2158
2160
2159 This hides changes in the working directory, if they aren't
2161 This hides changes in the working directory, if they aren't
2160 committed in this context.
2162 committed in this context.
2161 """
2163 """
2162 def __init__(self, repo, changes,
2164 def __init__(self, repo, changes,
2163 text="", user=None, date=None, extra=None):
2165 text="", user=None, date=None, extra=None):
2164 super(workingctx, self).__init__(repo, text, user, date, extra,
2166 super(workingctx, self).__init__(repo, text, user, date, extra,
2165 changes)
2167 changes)
2166
2168
2167 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2169 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2168 """Return matched files only in ``self._status``
2170 """Return matched files only in ``self._status``
2169
2171
2170 Uncommitted files appear "clean" via this context, even if
2172 Uncommitted files appear "clean" via this context, even if
2171 they aren't actually so in the working directory.
2173 they aren't actually so in the working directory.
2172 """
2174 """
2173 if clean:
2175 if clean:
2174 clean = [f for f in self._manifest if f not in self._changedset]
2176 clean = [f for f in self._manifest if f not in self._changedset]
2175 else:
2177 else:
2176 clean = []
2178 clean = []
2177 return scmutil.status([f for f in self._status.modified if match(f)],
2179 return scmutil.status([f for f in self._status.modified if match(f)],
2178 [f for f in self._status.added if match(f)],
2180 [f for f in self._status.added if match(f)],
2179 [f for f in self._status.removed if match(f)],
2181 [f for f in self._status.removed if match(f)],
2180 [], [], [], clean)
2182 [], [], [], clean)
2181
2183
2182 @propertycache
2184 @propertycache
2183 def _changedset(self):
2185 def _changedset(self):
2184 """Return the set of files changed in this context
2186 """Return the set of files changed in this context
2185 """
2187 """
2186 changed = set(self._status.modified)
2188 changed = set(self._status.modified)
2187 changed.update(self._status.added)
2189 changed.update(self._status.added)
2188 changed.update(self._status.removed)
2190 changed.update(self._status.removed)
2189 return changed
2191 return changed
2190
2192
2191 def makecachingfilectxfn(func):
2193 def makecachingfilectxfn(func):
2192 """Create a filectxfn that caches based on the path.
2194 """Create a filectxfn that caches based on the path.
2193
2195
2194 We can't use util.cachefunc because it uses all arguments as the cache
2196 We can't use util.cachefunc because it uses all arguments as the cache
2195 key and this creates a cycle since the arguments include the repo and
2197 key and this creates a cycle since the arguments include the repo and
2196 memctx.
2198 memctx.
2197 """
2199 """
2198 cache = {}
2200 cache = {}
2199
2201
2200 def getfilectx(repo, memctx, path):
2202 def getfilectx(repo, memctx, path):
2201 if path not in cache:
2203 if path not in cache:
2202 cache[path] = func(repo, memctx, path)
2204 cache[path] = func(repo, memctx, path)
2203 return cache[path]
2205 return cache[path]
2204
2206
2205 return getfilectx
2207 return getfilectx
2206
2208
2207 def memfilefromctx(ctx):
2209 def memfilefromctx(ctx):
2208 """Given a context return a memfilectx for ctx[path]
2210 """Given a context return a memfilectx for ctx[path]
2209
2211
2210 This is a convenience method for building a memctx based on another
2212 This is a convenience method for building a memctx based on another
2211 context.
2213 context.
2212 """
2214 """
2213 def getfilectx(repo, memctx, path):
2215 def getfilectx(repo, memctx, path):
2214 fctx = ctx[path]
2216 fctx = ctx[path]
2215 # this is weird but apparently we only keep track of one parent
2217 # this is weird but apparently we only keep track of one parent
2216 # (why not only store that instead of a tuple?)
2218 # (why not only store that instead of a tuple?)
2217 copied = fctx.renamed()
2219 copied = fctx.renamed()
2218 if copied:
2220 if copied:
2219 copied = copied[0]
2221 copied = copied[0]
2220 return memfilectx(repo, path, fctx.data(),
2222 return memfilectx(repo, path, fctx.data(),
2221 islink=fctx.islink(), isexec=fctx.isexec(),
2223 islink=fctx.islink(), isexec=fctx.isexec(),
2222 copied=copied, memctx=memctx)
2224 copied=copied, memctx=memctx)
2223
2225
2224 return getfilectx
2226 return getfilectx
2225
2227
2226 def memfilefrompatch(patchstore):
2228 def memfilefrompatch(patchstore):
2227 """Given a patch (e.g. patchstore object) return a memfilectx
2229 """Given a patch (e.g. patchstore object) return a memfilectx
2228
2230
2229 This is a convenience method for building a memctx based on a patchstore.
2231 This is a convenience method for building a memctx based on a patchstore.
2230 """
2232 """
2231 def getfilectx(repo, memctx, path):
2233 def getfilectx(repo, memctx, path):
2232 data, mode, copied = patchstore.getfile(path)
2234 data, mode, copied = patchstore.getfile(path)
2233 if data is None:
2235 if data is None:
2234 return None
2236 return None
2235 islink, isexec = mode
2237 islink, isexec = mode
2236 return memfilectx(repo, path, data, islink=islink,
2238 return memfilectx(repo, path, data, islink=islink,
2237 isexec=isexec, copied=copied,
2239 isexec=isexec, copied=copied,
2238 memctx=memctx)
2240 memctx=memctx)
2239
2241
2240 return getfilectx
2242 return getfilectx
2241
2243
2242 class memctx(committablectx):
2244 class memctx(committablectx):
2243 """Use memctx to perform in-memory commits via localrepo.commitctx().
2245 """Use memctx to perform in-memory commits via localrepo.commitctx().
2244
2246
2245 Revision information is supplied at initialization time while
2247 Revision information is supplied at initialization time while
2246 related files data and is made available through a callback
2248 related files data and is made available through a callback
2247 mechanism. 'repo' is the current localrepo, 'parents' is a
2249 mechanism. 'repo' is the current localrepo, 'parents' is a
2248 sequence of two parent revisions identifiers (pass None for every
2250 sequence of two parent revisions identifiers (pass None for every
2249 missing parent), 'text' is the commit message and 'files' lists
2251 missing parent), 'text' is the commit message and 'files' lists
2250 names of files touched by the revision (normalized and relative to
2252 names of files touched by the revision (normalized and relative to
2251 repository root).
2253 repository root).
2252
2254
2253 filectxfn(repo, memctx, path) is a callable receiving the
2255 filectxfn(repo, memctx, path) is a callable receiving the
2254 repository, the current memctx object and the normalized path of
2256 repository, the current memctx object and the normalized path of
2255 requested file, relative to repository root. It is fired by the
2257 requested file, relative to repository root. It is fired by the
2256 commit function for every file in 'files', but calls order is
2258 commit function for every file in 'files', but calls order is
2257 undefined. If the file is available in the revision being
2259 undefined. If the file is available in the revision being
2258 committed (updated or added), filectxfn returns a memfilectx
2260 committed (updated or added), filectxfn returns a memfilectx
2259 object. If the file was removed, filectxfn return None for recent
2261 object. If the file was removed, filectxfn return None for recent
2260 Mercurial. Moved files are represented by marking the source file
2262 Mercurial. Moved files are represented by marking the source file
2261 removed and the new file added with copy information (see
2263 removed and the new file added with copy information (see
2262 memfilectx).
2264 memfilectx).
2263
2265
2264 user receives the committer name and defaults to current
2266 user receives the committer name and defaults to current
2265 repository username, date is the commit date in any format
2267 repository username, date is the commit date in any format
2266 supported by util.parsedate() and defaults to current date, extra
2268 supported by util.parsedate() and defaults to current date, extra
2267 is a dictionary of metadata or is left empty.
2269 is a dictionary of metadata or is left empty.
2268 """
2270 """
2269
2271
2270 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2272 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2271 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2273 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2272 # this field to determine what to do in filectxfn.
2274 # this field to determine what to do in filectxfn.
2273 _returnnoneformissingfiles = True
2275 _returnnoneformissingfiles = True
2274
2276
2275 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2277 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2276 date=None, extra=None, branch=None, editor=False):
2278 date=None, extra=None, branch=None, editor=False):
2277 super(memctx, self).__init__(repo, text, user, date, extra)
2279 super(memctx, self).__init__(repo, text, user, date, extra)
2278 self._rev = None
2280 self._rev = None
2279 self._node = None
2281 self._node = None
2280 parents = [(p or nullid) for p in parents]
2282 parents = [(p or nullid) for p in parents]
2281 p1, p2 = parents
2283 p1, p2 = parents
2282 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2284 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2283 files = sorted(set(files))
2285 files = sorted(set(files))
2284 self._files = files
2286 self._files = files
2285 if branch is not None:
2287 if branch is not None:
2286 self._extra['branch'] = encoding.fromlocal(branch)
2288 self._extra['branch'] = encoding.fromlocal(branch)
2287 self.substate = {}
2289 self.substate = {}
2288
2290
2289 if isinstance(filectxfn, patch.filestore):
2291 if isinstance(filectxfn, patch.filestore):
2290 filectxfn = memfilefrompatch(filectxfn)
2292 filectxfn = memfilefrompatch(filectxfn)
2291 elif not callable(filectxfn):
2293 elif not callable(filectxfn):
2292 # if store is not callable, wrap it in a function
2294 # if store is not callable, wrap it in a function
2293 filectxfn = memfilefromctx(filectxfn)
2295 filectxfn = memfilefromctx(filectxfn)
2294
2296
2295 # memoizing increases performance for e.g. vcs convert scenarios.
2297 # memoizing increases performance for e.g. vcs convert scenarios.
2296 self._filectxfn = makecachingfilectxfn(filectxfn)
2298 self._filectxfn = makecachingfilectxfn(filectxfn)
2297
2299
2298 if editor:
2300 if editor:
2299 self._text = editor(self._repo, self, [])
2301 self._text = editor(self._repo, self, [])
2300 self._repo.savecommitmessage(self._text)
2302 self._repo.savecommitmessage(self._text)
2301
2303
2302 def filectx(self, path, filelog=None):
2304 def filectx(self, path, filelog=None):
2303 """get a file context from the working directory
2305 """get a file context from the working directory
2304
2306
2305 Returns None if file doesn't exist and should be removed."""
2307 Returns None if file doesn't exist and should be removed."""
2306 return self._filectxfn(self._repo, self, path)
2308 return self._filectxfn(self._repo, self, path)
2307
2309
2308 def commit(self):
2310 def commit(self):
2309 """commit context to the repo"""
2311 """commit context to the repo"""
2310 return self._repo.commitctx(self)
2312 return self._repo.commitctx(self)
2311
2313
2312 @propertycache
2314 @propertycache
2313 def _manifest(self):
2315 def _manifest(self):
2314 """generate a manifest based on the return values of filectxfn"""
2316 """generate a manifest based on the return values of filectxfn"""
2315
2317
2316 # keep this simple for now; just worry about p1
2318 # keep this simple for now; just worry about p1
2317 pctx = self._parents[0]
2319 pctx = self._parents[0]
2318 man = pctx.manifest().copy()
2320 man = pctx.manifest().copy()
2319
2321
2320 for f in self._status.modified:
2322 for f in self._status.modified:
2321 p1node = nullid
2323 p1node = nullid
2322 p2node = nullid
2324 p2node = nullid
2323 p = pctx[f].parents() # if file isn't in pctx, check p2?
2325 p = pctx[f].parents() # if file isn't in pctx, check p2?
2324 if len(p) > 0:
2326 if len(p) > 0:
2325 p1node = p[0].filenode()
2327 p1node = p[0].filenode()
2326 if len(p) > 1:
2328 if len(p) > 1:
2327 p2node = p[1].filenode()
2329 p2node = p[1].filenode()
2328 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2330 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2329
2331
2330 for f in self._status.added:
2332 for f in self._status.added:
2331 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2333 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2332
2334
2333 for f in self._status.removed:
2335 for f in self._status.removed:
2334 if f in man:
2336 if f in man:
2335 del man[f]
2337 del man[f]
2336
2338
2337 return man
2339 return man
2338
2340
2339 @propertycache
2341 @propertycache
2340 def _status(self):
2342 def _status(self):
2341 """Calculate exact status from ``files`` specified at construction
2343 """Calculate exact status from ``files`` specified at construction
2342 """
2344 """
2343 man1 = self.p1().manifest()
2345 man1 = self.p1().manifest()
2344 p2 = self._parents[1]
2346 p2 = self._parents[1]
2345 # "1 < len(self._parents)" can't be used for checking
2347 # "1 < len(self._parents)" can't be used for checking
2346 # existence of the 2nd parent, because "memctx._parents" is
2348 # existence of the 2nd parent, because "memctx._parents" is
2347 # explicitly initialized by the list, of which length is 2.
2349 # explicitly initialized by the list, of which length is 2.
2348 if p2.node() != nullid:
2350 if p2.node() != nullid:
2349 man2 = p2.manifest()
2351 man2 = p2.manifest()
2350 managing = lambda f: f in man1 or f in man2
2352 managing = lambda f: f in man1 or f in man2
2351 else:
2353 else:
2352 managing = lambda f: f in man1
2354 managing = lambda f: f in man1
2353
2355
2354 modified, added, removed = [], [], []
2356 modified, added, removed = [], [], []
2355 for f in self._files:
2357 for f in self._files:
2356 if not managing(f):
2358 if not managing(f):
2357 added.append(f)
2359 added.append(f)
2358 elif self[f]:
2360 elif self[f]:
2359 modified.append(f)
2361 modified.append(f)
2360 else:
2362 else:
2361 removed.append(f)
2363 removed.append(f)
2362
2364
2363 return scmutil.status(modified, added, removed, [], [], [], [])
2365 return scmutil.status(modified, added, removed, [], [], [], [])
2364
2366
2365 class memfilectx(committablefilectx):
2367 class memfilectx(committablefilectx):
2366 """memfilectx represents an in-memory file to commit.
2368 """memfilectx represents an in-memory file to commit.
2367
2369
2368 See memctx and committablefilectx for more details.
2370 See memctx and committablefilectx for more details.
2369 """
2371 """
2370 def __init__(self, repo, path, data, islink=False,
2372 def __init__(self, repo, path, data, islink=False,
2371 isexec=False, copied=None, memctx=None):
2373 isexec=False, copied=None, memctx=None):
2372 """
2374 """
2373 path is the normalized file path relative to repository root.
2375 path is the normalized file path relative to repository root.
2374 data is the file content as a string.
2376 data is the file content as a string.
2375 islink is True if the file is a symbolic link.
2377 islink is True if the file is a symbolic link.
2376 isexec is True if the file is executable.
2378 isexec is True if the file is executable.
2377 copied is the source file path if current file was copied in the
2379 copied is the source file path if current file was copied in the
2378 revision being committed, or None."""
2380 revision being committed, or None."""
2379 super(memfilectx, self).__init__(repo, path, None, memctx)
2381 super(memfilectx, self).__init__(repo, path, None, memctx)
2380 self._data = data
2382 self._data = data
2381 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2383 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2382 self._copied = None
2384 self._copied = None
2383 if copied:
2385 if copied:
2384 self._copied = (copied, nullid)
2386 self._copied = (copied, nullid)
2385
2387
2386 def data(self):
2388 def data(self):
2387 return self._data
2389 return self._data
2388
2390
2389 def remove(self, ignoremissing=False):
2391 def remove(self, ignoremissing=False):
2390 """wraps unlink for a repo's working directory"""
2392 """wraps unlink for a repo's working directory"""
2391 # need to figure out what to do here
2393 # need to figure out what to do here
2392 del self._changectx[self._path]
2394 del self._changectx[self._path]
2393
2395
2394 def write(self, data, flags):
2396 def write(self, data, flags):
2395 """wraps repo.wwrite"""
2397 """wraps repo.wwrite"""
2396 self._data = data
2398 self._data = data
2397
2399
2398 class overlayfilectx(committablefilectx):
2400 class overlayfilectx(committablefilectx):
2399 """Like memfilectx but take an original filectx and optional parameters to
2401 """Like memfilectx but take an original filectx and optional parameters to
2400 override parts of it. This is useful when fctx.data() is expensive (i.e.
2402 override parts of it. This is useful when fctx.data() is expensive (i.e.
2401 flag processor is expensive) and raw data, flags, and filenode could be
2403 flag processor is expensive) and raw data, flags, and filenode could be
2402 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2404 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2403 """
2405 """
2404
2406
2405 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2407 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2406 copied=None, ctx=None):
2408 copied=None, ctx=None):
2407 """originalfctx: filecontext to duplicate
2409 """originalfctx: filecontext to duplicate
2408
2410
2409 datafunc: None or a function to override data (file content). It is a
2411 datafunc: None or a function to override data (file content). It is a
2410 function to be lazy. path, flags, copied, ctx: None or overridden value
2412 function to be lazy. path, flags, copied, ctx: None or overridden value
2411
2413
2412 copied could be (path, rev), or False. copied could also be just path,
2414 copied could be (path, rev), or False. copied could also be just path,
2413 and will be converted to (path, nullid). This simplifies some callers.
2415 and will be converted to (path, nullid). This simplifies some callers.
2414 """
2416 """
2415
2417
2416 if path is None:
2418 if path is None:
2417 path = originalfctx.path()
2419 path = originalfctx.path()
2418 if ctx is None:
2420 if ctx is None:
2419 ctx = originalfctx.changectx()
2421 ctx = originalfctx.changectx()
2420 ctxmatch = lambda: True
2422 ctxmatch = lambda: True
2421 else:
2423 else:
2422 ctxmatch = lambda: ctx == originalfctx.changectx()
2424 ctxmatch = lambda: ctx == originalfctx.changectx()
2423
2425
2424 repo = originalfctx.repo()
2426 repo = originalfctx.repo()
2425 flog = originalfctx.filelog()
2427 flog = originalfctx.filelog()
2426 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2428 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2427
2429
2428 if copied is None:
2430 if copied is None:
2429 copied = originalfctx.renamed()
2431 copied = originalfctx.renamed()
2430 copiedmatch = lambda: True
2432 copiedmatch = lambda: True
2431 else:
2433 else:
2432 if copied and not isinstance(copied, tuple):
2434 if copied and not isinstance(copied, tuple):
2433 # repo._filecommit will recalculate copyrev so nullid is okay
2435 # repo._filecommit will recalculate copyrev so nullid is okay
2434 copied = (copied, nullid)
2436 copied = (copied, nullid)
2435 copiedmatch = lambda: copied == originalfctx.renamed()
2437 copiedmatch = lambda: copied == originalfctx.renamed()
2436
2438
2437 # When data, copied (could affect data), ctx (could affect filelog
2439 # When data, copied (could affect data), ctx (could affect filelog
2438 # parents) are not overridden, rawdata, rawflags, and filenode may be
2440 # parents) are not overridden, rawdata, rawflags, and filenode may be
2439 # reused (repo._filecommit should double check filelog parents).
2441 # reused (repo._filecommit should double check filelog parents).
2440 #
2442 #
2441 # path, flags are not hashed in filelog (but in manifestlog) so they do
2443 # path, flags are not hashed in filelog (but in manifestlog) so they do
2442 # not affect reusable here.
2444 # not affect reusable here.
2443 #
2445 #
2444 # If ctx or copied is overridden to a same value with originalfctx,
2446 # If ctx or copied is overridden to a same value with originalfctx,
2445 # still consider it's reusable. originalfctx.renamed() may be a bit
2447 # still consider it's reusable. originalfctx.renamed() may be a bit
2446 # expensive so it's not called unless necessary. Assuming datafunc is
2448 # expensive so it's not called unless necessary. Assuming datafunc is
2447 # always expensive, do not call it for this "reusable" test.
2449 # always expensive, do not call it for this "reusable" test.
2448 reusable = datafunc is None and ctxmatch() and copiedmatch()
2450 reusable = datafunc is None and ctxmatch() and copiedmatch()
2449
2451
2450 if datafunc is None:
2452 if datafunc is None:
2451 datafunc = originalfctx.data
2453 datafunc = originalfctx.data
2452 if flags is None:
2454 if flags is None:
2453 flags = originalfctx.flags()
2455 flags = originalfctx.flags()
2454
2456
2455 self._datafunc = datafunc
2457 self._datafunc = datafunc
2456 self._flags = flags
2458 self._flags = flags
2457 self._copied = copied
2459 self._copied = copied
2458
2460
2459 if reusable:
2461 if reusable:
2460 # copy extra fields from originalfctx
2462 # copy extra fields from originalfctx
2461 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2463 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2462 for attr_ in attrs:
2464 for attr_ in attrs:
2463 if util.safehasattr(originalfctx, attr_):
2465 if util.safehasattr(originalfctx, attr_):
2464 setattr(self, attr_, getattr(originalfctx, attr_))
2466 setattr(self, attr_, getattr(originalfctx, attr_))
2465
2467
2466 def data(self):
2468 def data(self):
2467 return self._datafunc()
2469 return self._datafunc()
2468
2470
2469 class metadataonlyctx(committablectx):
2471 class metadataonlyctx(committablectx):
2470 """Like memctx but it's reusing the manifest of different commit.
2472 """Like memctx but it's reusing the manifest of different commit.
2471 Intended to be used by lightweight operations that are creating
2473 Intended to be used by lightweight operations that are creating
2472 metadata-only changes.
2474 metadata-only changes.
2473
2475
2474 Revision information is supplied at initialization time. 'repo' is the
2476 Revision information is supplied at initialization time. 'repo' is the
2475 current localrepo, 'ctx' is original revision which manifest we're reuisng
2477 current localrepo, 'ctx' is original revision which manifest we're reuisng
2476 'parents' is a sequence of two parent revisions identifiers (pass None for
2478 'parents' is a sequence of two parent revisions identifiers (pass None for
2477 every missing parent), 'text' is the commit.
2479 every missing parent), 'text' is the commit.
2478
2480
2479 user receives the committer name and defaults to current repository
2481 user receives the committer name and defaults to current repository
2480 username, date is the commit date in any format supported by
2482 username, date is the commit date in any format supported by
2481 util.parsedate() and defaults to current date, extra is a dictionary of
2483 util.parsedate() and defaults to current date, extra is a dictionary of
2482 metadata or is left empty.
2484 metadata or is left empty.
2483 """
2485 """
2484 def __new__(cls, repo, originalctx, *args, **kwargs):
2486 def __new__(cls, repo, originalctx, *args, **kwargs):
2485 return super(metadataonlyctx, cls).__new__(cls, repo)
2487 return super(metadataonlyctx, cls).__new__(cls, repo)
2486
2488
2487 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2489 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2488 date=None, extra=None, editor=False):
2490 date=None, extra=None, editor=False):
2489 if text is None:
2491 if text is None:
2490 text = originalctx.description()
2492 text = originalctx.description()
2491 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2493 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2492 self._rev = None
2494 self._rev = None
2493 self._node = None
2495 self._node = None
2494 self._originalctx = originalctx
2496 self._originalctx = originalctx
2495 self._manifestnode = originalctx.manifestnode()
2497 self._manifestnode = originalctx.manifestnode()
2496 if parents is None:
2498 if parents is None:
2497 parents = originalctx.parents()
2499 parents = originalctx.parents()
2498 else:
2500 else:
2499 parents = [repo[p] for p in parents if p is not None]
2501 parents = [repo[p] for p in parents if p is not None]
2500 parents = parents[:]
2502 parents = parents[:]
2501 while len(parents) < 2:
2503 while len(parents) < 2:
2502 parents.append(repo[nullid])
2504 parents.append(repo[nullid])
2503 p1, p2 = self._parents = parents
2505 p1, p2 = self._parents = parents
2504
2506
2505 # sanity check to ensure that the reused manifest parents are
2507 # sanity check to ensure that the reused manifest parents are
2506 # manifests of our commit parents
2508 # manifests of our commit parents
2507 mp1, mp2 = self.manifestctx().parents
2509 mp1, mp2 = self.manifestctx().parents
2508 if p1 != nullid and p1.manifestnode() != mp1:
2510 if p1 != nullid and p1.manifestnode() != mp1:
2509 raise RuntimeError('can\'t reuse the manifest: '
2511 raise RuntimeError('can\'t reuse the manifest: '
2510 'its p1 doesn\'t match the new ctx p1')
2512 'its p1 doesn\'t match the new ctx p1')
2511 if p2 != nullid and p2.manifestnode() != mp2:
2513 if p2 != nullid and p2.manifestnode() != mp2:
2512 raise RuntimeError('can\'t reuse the manifest: '
2514 raise RuntimeError('can\'t reuse the manifest: '
2513 'its p2 doesn\'t match the new ctx p2')
2515 'its p2 doesn\'t match the new ctx p2')
2514
2516
2515 self._files = originalctx.files()
2517 self._files = originalctx.files()
2516 self.substate = {}
2518 self.substate = {}
2517
2519
2518 if editor:
2520 if editor:
2519 self._text = editor(self._repo, self, [])
2521 self._text = editor(self._repo, self, [])
2520 self._repo.savecommitmessage(self._text)
2522 self._repo.savecommitmessage(self._text)
2521
2523
2522 def manifestnode(self):
2524 def manifestnode(self):
2523 return self._manifestnode
2525 return self._manifestnode
2524
2526
2525 @property
2527 @property
2526 def _manifestctx(self):
2528 def _manifestctx(self):
2527 return self._repo.manifestlog[self._manifestnode]
2529 return self._repo.manifestlog[self._manifestnode]
2528
2530
2529 def filectx(self, path, filelog=None):
2531 def filectx(self, path, filelog=None):
2530 return self._originalctx.filectx(path, filelog=filelog)
2532 return self._originalctx.filectx(path, filelog=filelog)
2531
2533
2532 def commit(self):
2534 def commit(self):
2533 """commit context to the repo"""
2535 """commit context to the repo"""
2534 return self._repo.commitctx(self)
2536 return self._repo.commitctx(self)
2535
2537
2536 @property
2538 @property
2537 def _manifest(self):
2539 def _manifest(self):
2538 return self._originalctx.manifest()
2540 return self._originalctx.manifest()
2539
2541
2540 @propertycache
2542 @propertycache
2541 def _status(self):
2543 def _status(self):
2542 """Calculate exact status from ``files`` specified in the ``origctx``
2544 """Calculate exact status from ``files`` specified in the ``origctx``
2543 and parents manifests.
2545 and parents manifests.
2544 """
2546 """
2545 man1 = self.p1().manifest()
2547 man1 = self.p1().manifest()
2546 p2 = self._parents[1]
2548 p2 = self._parents[1]
2547 # "1 < len(self._parents)" can't be used for checking
2549 # "1 < len(self._parents)" can't be used for checking
2548 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2550 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2549 # explicitly initialized by the list, of which length is 2.
2551 # explicitly initialized by the list, of which length is 2.
2550 if p2.node() != nullid:
2552 if p2.node() != nullid:
2551 man2 = p2.manifest()
2553 man2 = p2.manifest()
2552 managing = lambda f: f in man1 or f in man2
2554 managing = lambda f: f in man1 or f in man2
2553 else:
2555 else:
2554 managing = lambda f: f in man1
2556 managing = lambda f: f in man1
2555
2557
2556 modified, added, removed = [], [], []
2558 modified, added, removed = [], [], []
2557 for f in self._files:
2559 for f in self._files:
2558 if not managing(f):
2560 if not managing(f):
2559 added.append(f)
2561 added.append(f)
2560 elif f in self:
2562 elif f in self:
2561 modified.append(f)
2563 modified.append(f)
2562 else:
2564 else:
2563 removed.append(f)
2565 removed.append(f)
2564
2566
2565 return scmutil.status(modified, added, removed, [], [], [], [])
2567 return scmutil.status(modified, added, removed, [], [], [], [])
2566
2568
2567 class arbitraryfilectx(object):
2569 class arbitraryfilectx(object):
2568 """Allows you to use filectx-like functions on a file in an arbitrary
2570 """Allows you to use filectx-like functions on a file in an arbitrary
2569 location on disk, possibly not in the working directory.
2571 location on disk, possibly not in the working directory.
2570 """
2572 """
2571 def __init__(self, path, repo=None):
2573 def __init__(self, path, repo=None):
2572 # Repo is optional because contrib/simplemerge uses this class.
2574 # Repo is optional because contrib/simplemerge uses this class.
2573 self._repo = repo
2575 self._repo = repo
2574 self._path = path
2576 self._path = path
2575
2577
2576 def cmp(self, fctx):
2578 def cmp(self, fctx):
2577 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2579 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2578 # path if either side is a symlink.
2580 # path if either side is a symlink.
2579 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2581 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2580 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2582 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2581 # Add a fast-path for merge if both sides are disk-backed.
2583 # Add a fast-path for merge if both sides are disk-backed.
2582 # Note that filecmp uses the opposite return values (True if same)
2584 # Note that filecmp uses the opposite return values (True if same)
2583 # from our cmp functions (True if different).
2585 # from our cmp functions (True if different).
2584 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2586 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2585 return self.data() != fctx.data()
2587 return self.data() != fctx.data()
2586
2588
2587 def path(self):
2589 def path(self):
2588 return self._path
2590 return self._path
2589
2591
2590 def flags(self):
2592 def flags(self):
2591 return ''
2593 return ''
2592
2594
2593 def data(self):
2595 def data(self):
2594 return util.readfile(self._path)
2596 return util.readfile(self._path)
2595
2597
2596 def decodeddata(self):
2598 def decodeddata(self):
2597 with open(self._path, "rb") as f:
2599 with open(self._path, "rb") as f:
2598 return f.read()
2600 return f.read()
2599
2601
2600 def remove(self):
2602 def remove(self):
2601 util.unlink(self._path)
2603 util.unlink(self._path)
2602
2604
2603 def write(self, data, flags):
2605 def write(self, data, flags):
2604 assert not flags
2606 assert not flags
2605 with open(self._path, "w") as f:
2607 with open(self._path, "w") as f:
2606 f.write(data)
2608 f.write(data)
@@ -1,644 +1,645
1 # hgweb/webutil.py - utility library for the web interface.
1 # hgweb/webutil.py - utility library for the web interface.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import copy
11 import copy
12 import difflib
12 import difflib
13 import os
13 import os
14 import re
14 import re
15
15
16 from ..i18n import _
16 from ..i18n import _
17 from ..node import hex, nullid, short
17 from ..node import hex, nullid, short
18
18
19 from .common import (
19 from .common import (
20 ErrorResponse,
20 ErrorResponse,
21 HTTP_BAD_REQUEST,
21 HTTP_BAD_REQUEST,
22 HTTP_NOT_FOUND,
22 HTTP_NOT_FOUND,
23 paritygen,
23 paritygen,
24 )
24 )
25
25
26 from .. import (
26 from .. import (
27 context,
27 context,
28 error,
28 error,
29 match,
29 match,
30 mdiff,
30 mdiff,
31 patch,
31 patch,
32 pathutil,
32 pathutil,
33 pycompat,
33 pycompat,
34 templatefilters,
34 templatefilters,
35 ui as uimod,
35 ui as uimod,
36 util,
36 util,
37 )
37 )
38
38
39 def up(p):
39 def up(p):
40 if p[0] != "/":
40 if p[0] != "/":
41 p = "/" + p
41 p = "/" + p
42 if p[-1] == "/":
42 if p[-1] == "/":
43 p = p[:-1]
43 p = p[:-1]
44 up = os.path.dirname(p)
44 up = os.path.dirname(p)
45 if up == "/":
45 if up == "/":
46 return "/"
46 return "/"
47 return up + "/"
47 return up + "/"
48
48
49 def _navseq(step, firststep=None):
49 def _navseq(step, firststep=None):
50 if firststep:
50 if firststep:
51 yield firststep
51 yield firststep
52 if firststep >= 20 and firststep <= 40:
52 if firststep >= 20 and firststep <= 40:
53 firststep = 50
53 firststep = 50
54 yield firststep
54 yield firststep
55 assert step > 0
55 assert step > 0
56 assert firststep > 0
56 assert firststep > 0
57 while step <= firststep:
57 while step <= firststep:
58 step *= 10
58 step *= 10
59 while True:
59 while True:
60 yield 1 * step
60 yield 1 * step
61 yield 3 * step
61 yield 3 * step
62 step *= 10
62 step *= 10
63
63
64 class revnav(object):
64 class revnav(object):
65
65
66 def __init__(self, repo):
66 def __init__(self, repo):
67 """Navigation generation object
67 """Navigation generation object
68
68
69 :repo: repo object we generate nav for
69 :repo: repo object we generate nav for
70 """
70 """
71 # used for hex generation
71 # used for hex generation
72 self._revlog = repo.changelog
72 self._revlog = repo.changelog
73
73
74 def __nonzero__(self):
74 def __nonzero__(self):
75 """return True if any revision to navigate over"""
75 """return True if any revision to navigate over"""
76 return self._first() is not None
76 return self._first() is not None
77
77
78 __bool__ = __nonzero__
78 __bool__ = __nonzero__
79
79
80 def _first(self):
80 def _first(self):
81 """return the minimum non-filtered changeset or None"""
81 """return the minimum non-filtered changeset or None"""
82 try:
82 try:
83 return next(iter(self._revlog))
83 return next(iter(self._revlog))
84 except StopIteration:
84 except StopIteration:
85 return None
85 return None
86
86
87 def hex(self, rev):
87 def hex(self, rev):
88 return hex(self._revlog.node(rev))
88 return hex(self._revlog.node(rev))
89
89
90 def gen(self, pos, pagelen, limit):
90 def gen(self, pos, pagelen, limit):
91 """computes label and revision id for navigation link
91 """computes label and revision id for navigation link
92
92
93 :pos: is the revision relative to which we generate navigation.
93 :pos: is the revision relative to which we generate navigation.
94 :pagelen: the size of each navigation page
94 :pagelen: the size of each navigation page
95 :limit: how far shall we link
95 :limit: how far shall we link
96
96
97 The return is:
97 The return is:
98 - a single element tuple
98 - a single element tuple
99 - containing a dictionary with a `before` and `after` key
99 - containing a dictionary with a `before` and `after` key
100 - values are generator functions taking arbitrary number of kwargs
100 - values are generator functions taking arbitrary number of kwargs
101 - yield items are dictionaries with `label` and `node` keys
101 - yield items are dictionaries with `label` and `node` keys
102 """
102 """
103 if not self:
103 if not self:
104 # empty repo
104 # empty repo
105 return ({'before': (), 'after': ()},)
105 return ({'before': (), 'after': ()},)
106
106
107 targets = []
107 targets = []
108 for f in _navseq(1, pagelen):
108 for f in _navseq(1, pagelen):
109 if f > limit:
109 if f > limit:
110 break
110 break
111 targets.append(pos + f)
111 targets.append(pos + f)
112 targets.append(pos - f)
112 targets.append(pos - f)
113 targets.sort()
113 targets.sort()
114
114
115 first = self._first()
115 first = self._first()
116 navbefore = [("(%i)" % first, self.hex(first))]
116 navbefore = [("(%i)" % first, self.hex(first))]
117 navafter = []
117 navafter = []
118 for rev in targets:
118 for rev in targets:
119 if rev not in self._revlog:
119 if rev not in self._revlog:
120 continue
120 continue
121 if pos < rev < limit:
121 if pos < rev < limit:
122 navafter.append(("+%d" % abs(rev - pos), self.hex(rev)))
122 navafter.append(("+%d" % abs(rev - pos), self.hex(rev)))
123 if 0 < rev < pos:
123 if 0 < rev < pos:
124 navbefore.append(("-%d" % abs(rev - pos), self.hex(rev)))
124 navbefore.append(("-%d" % abs(rev - pos), self.hex(rev)))
125
125
126
126
127 navafter.append(("tip", "tip"))
127 navafter.append(("tip", "tip"))
128
128
129 data = lambda i: {"label": i[0], "node": i[1]}
129 data = lambda i: {"label": i[0], "node": i[1]}
130 return ({'before': lambda **map: (data(i) for i in navbefore),
130 return ({'before': lambda **map: (data(i) for i in navbefore),
131 'after': lambda **map: (data(i) for i in navafter)},)
131 'after': lambda **map: (data(i) for i in navafter)},)
132
132
133 class filerevnav(revnav):
133 class filerevnav(revnav):
134
134
135 def __init__(self, repo, path):
135 def __init__(self, repo, path):
136 """Navigation generation object
136 """Navigation generation object
137
137
138 :repo: repo object we generate nav for
138 :repo: repo object we generate nav for
139 :path: path of the file we generate nav for
139 :path: path of the file we generate nav for
140 """
140 """
141 # used for iteration
141 # used for iteration
142 self._changelog = repo.unfiltered().changelog
142 self._changelog = repo.unfiltered().changelog
143 # used for hex generation
143 # used for hex generation
144 self._revlog = repo.file(path)
144 self._revlog = repo.file(path)
145
145
146 def hex(self, rev):
146 def hex(self, rev):
147 return hex(self._changelog.node(self._revlog.linkrev(rev)))
147 return hex(self._changelog.node(self._revlog.linkrev(rev)))
148
148
149 class _siblings(object):
149 class _siblings(object):
150 def __init__(self, siblings=None, hiderev=None):
150 def __init__(self, siblings=None, hiderev=None):
151 if siblings is None:
151 if siblings is None:
152 siblings = []
152 siblings = []
153 self.siblings = [s for s in siblings if s.node() != nullid]
153 self.siblings = [s for s in siblings if s.node() != nullid]
154 if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev:
154 if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev:
155 self.siblings = []
155 self.siblings = []
156
156
157 def __iter__(self):
157 def __iter__(self):
158 for s in self.siblings:
158 for s in self.siblings:
159 d = {
159 d = {
160 'node': s.hex(),
160 'node': s.hex(),
161 'rev': s.rev(),
161 'rev': s.rev(),
162 'user': s.user(),
162 'user': s.user(),
163 'date': s.date(),
163 'date': s.date(),
164 'description': s.description(),
164 'description': s.description(),
165 'branch': s.branch(),
165 'branch': s.branch(),
166 }
166 }
167 if util.safehasattr(s, 'path'):
167 if util.safehasattr(s, 'path'):
168 d['file'] = s.path()
168 d['file'] = s.path()
169 yield d
169 yield d
170
170
171 def __len__(self):
171 def __len__(self):
172 return len(self.siblings)
172 return len(self.siblings)
173
173
174 def difffeatureopts(req, ui, section):
174 def difffeatureopts(req, ui, section):
175 diffopts = patch.difffeatureopts(ui, untrusted=True,
175 diffopts = patch.difffeatureopts(ui, untrusted=True,
176 section=section, whitespace=True)
176 section=section, whitespace=True)
177
177
178 for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
178 for k in ('ignorews', 'ignorewsamount', 'ignorewseol', 'ignoreblanklines'):
179 v = req.form.get(k, [None])[0]
179 v = req.form.get(k, [None])[0]
180 if v is not None:
180 if v is not None:
181 v = util.parsebool(v)
181 v = util.parsebool(v)
182 setattr(diffopts, k, v if v is not None else True)
182 setattr(diffopts, k, v if v is not None else True)
183
183
184 return diffopts
184 return diffopts
185
185
186 def annotate(req, fctx, ui):
186 def annotate(req, fctx, ui):
187 diffopts = difffeatureopts(req, ui, 'annotate')
187 diffopts = difffeatureopts(req, ui, 'annotate')
188 return fctx.annotate(follow=True, linenumber=True, diffopts=diffopts)
188 return fctx.annotate(follow=True, linenumber=True, diffopts=diffopts)
189
189
190 def parents(ctx, hide=None):
190 def parents(ctx, hide=None):
191 if isinstance(ctx, context.basefilectx):
191 if isinstance(ctx, context.basefilectx):
192 introrev = ctx.introrev()
192 introrev = ctx.introrev()
193 if ctx.changectx().rev() != introrev:
193 if ctx.changectx().rev() != introrev:
194 return _siblings([ctx.repo()[introrev]], hide)
194 return _siblings([ctx.repo()[introrev]], hide)
195 return _siblings(ctx.parents(), hide)
195 return _siblings(ctx.parents(), hide)
196
196
197 def children(ctx, hide=None):
197 def children(ctx, hide=None):
198 return _siblings(ctx.children(), hide)
198 return _siblings(ctx.children(), hide)
199
199
200 def renamelink(fctx):
200 def renamelink(fctx):
201 r = fctx.renamed()
201 r = fctx.renamed()
202 if r:
202 if r:
203 return [{'file': r[0], 'node': hex(r[1])}]
203 return [{'file': r[0], 'node': hex(r[1])}]
204 return []
204 return []
205
205
206 def nodetagsdict(repo, node):
206 def nodetagsdict(repo, node):
207 return [{"name": i} for i in repo.nodetags(node)]
207 return [{"name": i} for i in repo.nodetags(node)]
208
208
209 def nodebookmarksdict(repo, node):
209 def nodebookmarksdict(repo, node):
210 return [{"name": i} for i in repo.nodebookmarks(node)]
210 return [{"name": i} for i in repo.nodebookmarks(node)]
211
211
212 def nodebranchdict(repo, ctx):
212 def nodebranchdict(repo, ctx):
213 branches = []
213 branches = []
214 branch = ctx.branch()
214 branch = ctx.branch()
215 # If this is an empty repo, ctx.node() == nullid,
215 # If this is an empty repo, ctx.node() == nullid,
216 # ctx.branch() == 'default'.
216 # ctx.branch() == 'default'.
217 try:
217 try:
218 branchnode = repo.branchtip(branch)
218 branchnode = repo.branchtip(branch)
219 except error.RepoLookupError:
219 except error.RepoLookupError:
220 branchnode = None
220 branchnode = None
221 if branchnode == ctx.node():
221 if branchnode == ctx.node():
222 branches.append({"name": branch})
222 branches.append({"name": branch})
223 return branches
223 return branches
224
224
225 def nodeinbranch(repo, ctx):
225 def nodeinbranch(repo, ctx):
226 branches = []
226 branches = []
227 branch = ctx.branch()
227 branch = ctx.branch()
228 try:
228 try:
229 branchnode = repo.branchtip(branch)
229 branchnode = repo.branchtip(branch)
230 except error.RepoLookupError:
230 except error.RepoLookupError:
231 branchnode = None
231 branchnode = None
232 if branch != 'default' and branchnode != ctx.node():
232 if branch != 'default' and branchnode != ctx.node():
233 branches.append({"name": branch})
233 branches.append({"name": branch})
234 return branches
234 return branches
235
235
236 def nodebranchnodefault(ctx):
236 def nodebranchnodefault(ctx):
237 branches = []
237 branches = []
238 branch = ctx.branch()
238 branch = ctx.branch()
239 if branch != 'default':
239 if branch != 'default':
240 branches.append({"name": branch})
240 branches.append({"name": branch})
241 return branches
241 return branches
242
242
243 def showtag(repo, tmpl, t1, node=nullid, **args):
243 def showtag(repo, tmpl, t1, node=nullid, **args):
244 for t in repo.nodetags(node):
244 for t in repo.nodetags(node):
245 yield tmpl(t1, tag=t, **args)
245 yield tmpl(t1, tag=t, **args)
246
246
247 def showbookmark(repo, tmpl, t1, node=nullid, **args):
247 def showbookmark(repo, tmpl, t1, node=nullid, **args):
248 for t in repo.nodebookmarks(node):
248 for t in repo.nodebookmarks(node):
249 yield tmpl(t1, bookmark=t, **args)
249 yield tmpl(t1, bookmark=t, **args)
250
250
251 def branchentries(repo, stripecount, limit=0):
251 def branchentries(repo, stripecount, limit=0):
252 tips = []
252 tips = []
253 heads = repo.heads()
253 heads = repo.heads()
254 parity = paritygen(stripecount)
254 parity = paritygen(stripecount)
255 sortkey = lambda item: (not item[1], item[0].rev())
255 sortkey = lambda item: (not item[1], item[0].rev())
256
256
257 def entries(**map):
257 def entries(**map):
258 count = 0
258 count = 0
259 if not tips:
259 if not tips:
260 for tag, hs, tip, closed in repo.branchmap().iterbranches():
260 for tag, hs, tip, closed in repo.branchmap().iterbranches():
261 tips.append((repo[tip], closed))
261 tips.append((repo[tip], closed))
262 for ctx, closed in sorted(tips, key=sortkey, reverse=True):
262 for ctx, closed in sorted(tips, key=sortkey, reverse=True):
263 if limit > 0 and count >= limit:
263 if limit > 0 and count >= limit:
264 return
264 return
265 count += 1
265 count += 1
266 if closed:
266 if closed:
267 status = 'closed'
267 status = 'closed'
268 elif ctx.node() not in heads:
268 elif ctx.node() not in heads:
269 status = 'inactive'
269 status = 'inactive'
270 else:
270 else:
271 status = 'open'
271 status = 'open'
272 yield {
272 yield {
273 'parity': next(parity),
273 'parity': next(parity),
274 'branch': ctx.branch(),
274 'branch': ctx.branch(),
275 'status': status,
275 'status': status,
276 'node': ctx.hex(),
276 'node': ctx.hex(),
277 'date': ctx.date()
277 'date': ctx.date()
278 }
278 }
279
279
280 return entries
280 return entries
281
281
282 def cleanpath(repo, path):
282 def cleanpath(repo, path):
283 path = path.lstrip('/')
283 path = path.lstrip('/')
284 return pathutil.canonpath(repo.root, '', path)
284 return pathutil.canonpath(repo.root, '', path)
285
285
286 def changeidctx(repo, changeid):
286 def changeidctx(repo, changeid):
287 try:
287 try:
288 ctx = repo[changeid]
288 ctx = repo[changeid]
289 except error.RepoError:
289 except error.RepoError:
290 man = repo.manifestlog._revlog
290 man = repo.manifestlog._revlog
291 ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]
291 ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))]
292
292
293 return ctx
293 return ctx
294
294
295 def changectx(repo, req):
295 def changectx(repo, req):
296 changeid = "tip"
296 changeid = "tip"
297 if 'node' in req.form:
297 if 'node' in req.form:
298 changeid = req.form['node'][0]
298 changeid = req.form['node'][0]
299 ipos = changeid.find(':')
299 ipos = changeid.find(':')
300 if ipos != -1:
300 if ipos != -1:
301 changeid = changeid[(ipos + 1):]
301 changeid = changeid[(ipos + 1):]
302 elif 'manifest' in req.form:
302 elif 'manifest' in req.form:
303 changeid = req.form['manifest'][0]
303 changeid = req.form['manifest'][0]
304
304
305 return changeidctx(repo, changeid)
305 return changeidctx(repo, changeid)
306
306
307 def basechangectx(repo, req):
307 def basechangectx(repo, req):
308 if 'node' in req.form:
308 if 'node' in req.form:
309 changeid = req.form['node'][0]
309 changeid = req.form['node'][0]
310 ipos = changeid.find(':')
310 ipos = changeid.find(':')
311 if ipos != -1:
311 if ipos != -1:
312 changeid = changeid[:ipos]
312 changeid = changeid[:ipos]
313 return changeidctx(repo, changeid)
313 return changeidctx(repo, changeid)
314
314
315 return None
315 return None
316
316
317 def filectx(repo, req):
317 def filectx(repo, req):
318 if 'file' not in req.form:
318 if 'file' not in req.form:
319 raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
319 raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
320 path = cleanpath(repo, req.form['file'][0])
320 path = cleanpath(repo, req.form['file'][0])
321 if 'node' in req.form:
321 if 'node' in req.form:
322 changeid = req.form['node'][0]
322 changeid = req.form['node'][0]
323 elif 'filenode' in req.form:
323 elif 'filenode' in req.form:
324 changeid = req.form['filenode'][0]
324 changeid = req.form['filenode'][0]
325 else:
325 else:
326 raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
326 raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
327 try:
327 try:
328 fctx = repo[changeid][path]
328 fctx = repo[changeid][path]
329 except error.RepoError:
329 except error.RepoError:
330 fctx = repo.filectx(path, fileid=changeid)
330 fctx = repo.filectx(path, fileid=changeid)
331
331
332 return fctx
332 return fctx
333
333
334 def linerange(req):
334 def linerange(req):
335 linerange = req.form.get('linerange')
335 linerange = req.form.get('linerange')
336 if linerange is None:
336 if linerange is None:
337 return None
337 return None
338 if len(linerange) > 1:
338 if len(linerange) > 1:
339 raise ErrorResponse(HTTP_BAD_REQUEST,
339 raise ErrorResponse(HTTP_BAD_REQUEST,
340 'redundant linerange parameter')
340 'redundant linerange parameter')
341 try:
341 try:
342 fromline, toline = map(int, linerange[0].split(':', 1))
342 fromline, toline = map(int, linerange[0].split(':', 1))
343 except ValueError:
343 except ValueError:
344 raise ErrorResponse(HTTP_BAD_REQUEST,
344 raise ErrorResponse(HTTP_BAD_REQUEST,
345 'invalid linerange parameter')
345 'invalid linerange parameter')
346 try:
346 try:
347 return util.processlinerange(fromline, toline)
347 return util.processlinerange(fromline, toline)
348 except error.ParseError as exc:
348 except error.ParseError as exc:
349 raise ErrorResponse(HTTP_BAD_REQUEST, str(exc))
349 raise ErrorResponse(HTTP_BAD_REQUEST, str(exc))
350
350
351 def formatlinerange(fromline, toline):
351 def formatlinerange(fromline, toline):
352 return '%d:%d' % (fromline + 1, toline)
352 return '%d:%d' % (fromline + 1, toline)
353
353
354 def commonentry(repo, ctx):
354 def commonentry(repo, ctx):
355 node = ctx.node()
355 node = ctx.node()
356 return {
356 return {
357 'rev': ctx.rev(),
357 'rev': ctx.rev(),
358 'node': hex(node),
358 'node': hex(node),
359 'author': ctx.user(),
359 'author': ctx.user(),
360 'desc': ctx.description(),
360 'desc': ctx.description(),
361 'date': ctx.date(),
361 'date': ctx.date(),
362 'extra': ctx.extra(),
362 'extra': ctx.extra(),
363 'phase': ctx.phasestr(),
363 'phase': ctx.phasestr(),
364 'obsolete': ctx.obsolete(),
364 'obsolete': ctx.obsolete(),
365 'instabilities': [{"name": i} for i in ctx.instabilities()],
365 'branch': nodebranchnodefault(ctx),
366 'branch': nodebranchnodefault(ctx),
366 'inbranch': nodeinbranch(repo, ctx),
367 'inbranch': nodeinbranch(repo, ctx),
367 'branches': nodebranchdict(repo, ctx),
368 'branches': nodebranchdict(repo, ctx),
368 'tags': nodetagsdict(repo, node),
369 'tags': nodetagsdict(repo, node),
369 'bookmarks': nodebookmarksdict(repo, node),
370 'bookmarks': nodebookmarksdict(repo, node),
370 'parent': lambda **x: parents(ctx),
371 'parent': lambda **x: parents(ctx),
371 'child': lambda **x: children(ctx),
372 'child': lambda **x: children(ctx),
372 }
373 }
373
374
374 def changelistentry(web, ctx, tmpl):
375 def changelistentry(web, ctx, tmpl):
375 '''Obtain a dictionary to be used for entries in a changelist.
376 '''Obtain a dictionary to be used for entries in a changelist.
376
377
377 This function is called when producing items for the "entries" list passed
378 This function is called when producing items for the "entries" list passed
378 to the "shortlog" and "changelog" templates.
379 to the "shortlog" and "changelog" templates.
379 '''
380 '''
380 repo = web.repo
381 repo = web.repo
381 rev = ctx.rev()
382 rev = ctx.rev()
382 n = ctx.node()
383 n = ctx.node()
383 showtags = showtag(repo, tmpl, 'changelogtag', n)
384 showtags = showtag(repo, tmpl, 'changelogtag', n)
384 files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
385 files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles)
385
386
386 entry = commonentry(repo, ctx)
387 entry = commonentry(repo, ctx)
387 entry.update(
388 entry.update(
388 allparents=lambda **x: parents(ctx),
389 allparents=lambda **x: parents(ctx),
389 parent=lambda **x: parents(ctx, rev - 1),
390 parent=lambda **x: parents(ctx, rev - 1),
390 child=lambda **x: children(ctx, rev + 1),
391 child=lambda **x: children(ctx, rev + 1),
391 changelogtag=showtags,
392 changelogtag=showtags,
392 files=files,
393 files=files,
393 )
394 )
394 return entry
395 return entry
395
396
396 def symrevorshortnode(req, ctx):
397 def symrevorshortnode(req, ctx):
397 if 'node' in req.form:
398 if 'node' in req.form:
398 return templatefilters.revescape(req.form['node'][0])
399 return templatefilters.revescape(req.form['node'][0])
399 else:
400 else:
400 return short(ctx.node())
401 return short(ctx.node())
401
402
402 def changesetentry(web, req, tmpl, ctx):
403 def changesetentry(web, req, tmpl, ctx):
403 '''Obtain a dictionary to be used to render the "changeset" template.'''
404 '''Obtain a dictionary to be used to render the "changeset" template.'''
404
405
405 showtags = showtag(web.repo, tmpl, 'changesettag', ctx.node())
406 showtags = showtag(web.repo, tmpl, 'changesettag', ctx.node())
406 showbookmarks = showbookmark(web.repo, tmpl, 'changesetbookmark',
407 showbookmarks = showbookmark(web.repo, tmpl, 'changesetbookmark',
407 ctx.node())
408 ctx.node())
408 showbranch = nodebranchnodefault(ctx)
409 showbranch = nodebranchnodefault(ctx)
409
410
410 files = []
411 files = []
411 parity = paritygen(web.stripecount)
412 parity = paritygen(web.stripecount)
412 for blockno, f in enumerate(ctx.files()):
413 for blockno, f in enumerate(ctx.files()):
413 template = f in ctx and 'filenodelink' or 'filenolink'
414 template = f in ctx and 'filenodelink' or 'filenolink'
414 files.append(tmpl(template,
415 files.append(tmpl(template,
415 node=ctx.hex(), file=f, blockno=blockno + 1,
416 node=ctx.hex(), file=f, blockno=blockno + 1,
416 parity=next(parity)))
417 parity=next(parity)))
417
418
418 basectx = basechangectx(web.repo, req)
419 basectx = basechangectx(web.repo, req)
419 if basectx is None:
420 if basectx is None:
420 basectx = ctx.p1()
421 basectx = ctx.p1()
421
422
422 style = web.config('web', 'style')
423 style = web.config('web', 'style')
423 if 'style' in req.form:
424 if 'style' in req.form:
424 style = req.form['style'][0]
425 style = req.form['style'][0]
425
426
426 diff = diffs(web, tmpl, ctx, basectx, None, style)
427 diff = diffs(web, tmpl, ctx, basectx, None, style)
427
428
428 parity = paritygen(web.stripecount)
429 parity = paritygen(web.stripecount)
429 diffstatsgen = diffstatgen(ctx, basectx)
430 diffstatsgen = diffstatgen(ctx, basectx)
430 diffstats = diffstat(tmpl, ctx, diffstatsgen, parity)
431 diffstats = diffstat(tmpl, ctx, diffstatsgen, parity)
431
432
432 return dict(
433 return dict(
433 diff=diff,
434 diff=diff,
434 symrev=symrevorshortnode(req, ctx),
435 symrev=symrevorshortnode(req, ctx),
435 basenode=basectx.hex(),
436 basenode=basectx.hex(),
436 changesettag=showtags,
437 changesettag=showtags,
437 changesetbookmark=showbookmarks,
438 changesetbookmark=showbookmarks,
438 changesetbranch=showbranch,
439 changesetbranch=showbranch,
439 files=files,
440 files=files,
440 diffsummary=lambda **x: diffsummary(diffstatsgen),
441 diffsummary=lambda **x: diffsummary(diffstatsgen),
441 diffstat=diffstats,
442 diffstat=diffstats,
442 archives=web.archivelist(ctx.hex()),
443 archives=web.archivelist(ctx.hex()),
443 **commonentry(web.repo, ctx))
444 **commonentry(web.repo, ctx))
444
445
445 def listfilediffs(tmpl, files, node, max):
446 def listfilediffs(tmpl, files, node, max):
446 for f in files[:max]:
447 for f in files[:max]:
447 yield tmpl('filedifflink', node=hex(node), file=f)
448 yield tmpl('filedifflink', node=hex(node), file=f)
448 if len(files) > max:
449 if len(files) > max:
449 yield tmpl('fileellipses')
450 yield tmpl('fileellipses')
450
451
def diffs(web, tmpl, ctx, basectx, files, style, linerange=None,
          lineidprefix=''):
    '''Generator yielding one rendered "diffblock" template per changed file
    in the diff between basectx and ctx.

    web          - hgweb request object (provides repo and stripecount)
    tmpl         - template rendering callable
    ctx, basectx - changectx objects to diff (basectx is the old side)
    files        - restrict the diff to exactly these paths; falsy means all
    style        - diff style; any style other than 'raw' drops the first
                   header line of each file block
    linerange    - optional range tuple: only emit hunks overlapping that
                   range on the "new" (ctx) side
    lineidprefix - string prepended to every generated line anchor id
    '''

    def prettyprintlines(lines, blockno):
        # Render each diff line, picking the template from the line's
        # leading character (+/-/@) and building a per-page-unique
        # "<blockno>.<lineno>" anchor id.
        for lineno, l in enumerate(lines, 1):
            difflineno = "%d.%d" % (blockno, lineno)
            if l.startswith('+'):
                ltype = "difflineplus"
            elif l.startswith('-'):
                ltype = "difflineminus"
            elif l.startswith('@'):
                ltype = "difflineat"
            else:
                ltype = "diffline"
            yield tmpl(ltype,
                       line=l,
                       lineno=lineno,
                       lineid=lineidprefix + "l%s" % difflineno,
                       linenumber="% 8s" % difflineno)

    repo = web.repo
    # Build a matcher: exact paths when a file list was given, everything
    # otherwise.
    if files:
        m = match.exact(repo.root, repo.getcwd(), files)
    else:
        m = match.always(repo.root, repo.getcwd())

    diffopts = patch.diffopts(repo.ui, untrusted=True)
    node1 = basectx.node()
    node2 = ctx.node()
    parity = paritygen(web.stripecount)

    diffhunks = patch.diffhunks(repo, node1, node2, m, opts=diffopts)
    for blockno, (fctx1, fctx2, header, hunks) in enumerate(diffhunks, 1):
        if style != 'raw':
            # drop the first header line for rendered (non-raw) styles
            header = header[1:]
        lines = [h + '\n' for h in header]
        for hunkrange, hunklines in hunks:
            if linerange is not None and hunkrange is not None:
                # hunkrange is (s1, l1, s2, l2); filter on the new side
                s1, l1, s2, l2 = hunkrange
                if not mdiff.hunkinrange((s2, l2), linerange):
                    continue
            lines.extend(hunklines)
        # skip file blocks whose every hunk was filtered out by linerange
        if lines:
            yield tmpl('diffblock', parity=next(parity), blockno=blockno,
                       lines=prettyprintlines(lines, blockno))
496
497
def compare(tmpl, context, leftlines, rightlines):
    '''Generator function that provides side-by-side comparison data.

    Yields one rendered "comparisonblock" template per opcode group of
    a SequenceMatcher diff of *leftlines* vs *rightlines*.  A negative
    *context* emits the full comparison; otherwise only *context* lines
    of surrounding context per change group.
    '''

    def compline(linetype, leftlineno, leftline, rightlineno, rightline):
        # Anchor id is built from whichever side(s) actually have a line;
        # a missing side contributes nothing (empty id fragment/number).
        lineid = leftlineno and ("l%s" % leftlineno) or ''
        lineid += rightlineno and ("r%s" % rightlineno) or ''
        return tmpl('comparisonline',
                    type=linetype,
                    lineid=lineid,
                    leftlineno=leftlineno,
                    leftlinenumber="% 6s" % (leftlineno or ''),
                    leftline=leftline or '',
                    rightlineno=rightlineno,
                    rightlinenumber="% 6s" % (rightlineno or ''),
                    rightline=rightline or '')

    def getblock(opcodes):
        for tag, llo, lhi, rlo, rhi in opcodes:
            len1 = lhi - llo
            len2 = rhi - rlo
            count = min(len1, len2)
            # lines present on both sides, paired up
            # (range instead of the py2-only xrange: identical iteration,
            # works on both Python 2 and 3)
            for i in range(count):
                yield compline(linetype=tag,
                               leftlineno=llo + i + 1,
                               leftline=leftlines[llo + i],
                               rightlineno=rlo + i + 1,
                               rightline=rightlines[rlo + i])
            # leftover lines exist on one side only
            if len1 > len2:
                for i in range(llo + count, lhi):
                    yield compline(linetype=tag,
                                   leftlineno=i + 1,
                                   leftline=leftlines[i],
                                   rightlineno=None,
                                   rightline=None)
            elif len2 > len1:
                for i in range(rlo + count, rhi):
                    yield compline(linetype=tag,
                                   leftlineno=None,
                                   leftline=None,
                                   rightlineno=i + 1,
                                   rightline=rightlines[i])

    s = difflib.SequenceMatcher(None, leftlines, rightlines)
    if context < 0:
        yield tmpl('comparisonblock', lines=getblock(s.get_opcodes()))
    else:
        for oc in s.get_grouped_opcodes(n=context):
            yield tmpl('comparisonblock', lines=getblock(oc))
545
546
def diffstatgen(ctx, basectx):
    '''Generator function that provides the diffstat data.

    The diffstat is computed once; every next() call yields the same
    tuple, so several consumers can share the result.
    '''
    difflines = util.iterlines(ctx.diff(basectx))
    stats = patch.diffstatdata(difflines)
    summary = patch.diffstatsum(stats)
    maxname, maxtotal, addtotal, removetotal, binary = summary
    while True:
        yield stats, maxname, maxtotal, addtotal, removetotal, binary
553
554
def diffsummary(statgen):
    '''Return a short summary of the diff.'''

    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
    template = _(' %d files changed, %d insertions(+), %d deletions(-)\n')
    return template % (len(stats), addtotal, removetotal)
560
561
def diffstat(tmpl, ctx, statgen, parity):
    '''Return a diffstat template for each file in the diff.'''

    stats, maxname, maxtotal, addtotal, removetotal, binary = next(statgen)
    changedfiles = ctx.files()

    def pct(count):
        # graph-bar width as a percentage of the widest entry
        return 0 if maxtotal == 0 else (float(count) / maxtotal) * 100

    for fileno, (filename, adds, removes, isbinary) in enumerate(stats, 1):
        # only files that are part of the changeset get a link
        if filename in changedfiles:
            template = 'diffstatlink'
        else:
            template = 'diffstatnolink'
        yield tmpl(template,
                   node=ctx.hex(),
                   file=filename,
                   fileno=fileno,
                   total=adds + removes,
                   addpct=pct(adds),
                   removepct=pct(removes),
                   parity=next(parity))
580
581
class sessionvars(object):
    '''A mutable mapping of URL query variables.

    Iterating yields one dict per variable, carrying the separator to
    print before it ("?" — or *start* — for the first, "&" afterwards),
    suitable for rebuilding a query string in a template.
    '''
    def __init__(self, vars, start='?'):
        self.vars = vars
        self.start = start

    def __getitem__(self, key):
        return self.vars[key]

    def __setitem__(self, key, value):
        self.vars[key] = value

    def __copy__(self):
        # shallow-copy the underlying mapping so the copy is independent
        return sessionvars(copy.copy(self.vars), self.start)

    def __iter__(self):
        sep = self.start
        for name, val in sorted(self.vars.iteritems()):
            entry = {'name': name,
                     'value': pycompat.bytestr(val),
                     'separator': sep}
            yield entry
            sep = '&'
599
600
class wsgiui(uimod.ui):
    '''ui subclass for use inside a WSGI server process.'''
    # default termwidth breaks under mod_wsgi
    def termwidth(self):
        # Report a fixed 80-column width instead of probing the terminal:
        # there is no controlling tty in a WSGI environment.
        return 80
604
605
def getwebsubs(repo):
    '''Parse the [websub] (and legacy [interhg]) config sections into a
    list of (compiled_regexp, format) substitution pairs.

    Each entry is sed-like: ``s<delim>pattern<delim>replacement<delim>flags``
    with any single-character delimiter; flags are any of "ilmsux".
    Invalid entries are reported via ui.warn() and skipped.
    '''
    websubtable = []
    websubdefs = repo.ui.configitems('websub')
    # we must maintain interhg backwards compatibility
    websubdefs += repo.ui.configitems('interhg')
    for key, pattern in websubdefs:
        # grab the delimiter from the character after the "s"
        unesc = pattern[1]
        delim = re.escape(unesc)

        # identify portions of the pattern, taking care to avoid escaped
        # delimiters. the replace format and flags are optional, but
        # delimiters are required.
        # note: the flags group must be ([ilmsux]*), not ([ilmsux])*: a
        # repeated capturing group only keeps its last repetition, which
        # silently dropped all but the final flag (e.g. "is" lost "i").
        match = re.match(
            r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux]*)$'
            % (delim, delim, delim), pattern)
        if not match:
            repo.ui.warn(_("websub: invalid pattern for %s: %s\n")
                         % (key, pattern))
            continue

        # we need to unescape the delimiter for regexp and format
        delim_re = re.compile(r'(?<!\\)\\%s' % delim)
        regexp = delim_re.sub(unesc, match.group(1))
        format = delim_re.sub(unesc, match.group(2))

        # the pattern allows for 6 regexp flags, so set them if necessary
        flagin = match.group(3)
        flags = 0
        if flagin:
            for flag in flagin.upper():
                flags |= re.__dict__[flag]

        try:
            regexp = re.compile(regexp, flags)
            websubtable.append((regexp, format))
        except re.error:
            # the user-supplied pattern itself failed to compile
            repo.ui.warn(_("websub: invalid regexp for %s: %s\n")
                         % (key, regexp))
    return websubtable
General Comments 0
You need to be logged in to leave comments. Login now