##// END OF EJS Templates
context: drop support for changeid of type long (API?)...
Martin von Zweigbergk -
r37235:97ab6f2d default
parent child Browse files
Show More
@@ -1,2600 +1,2598 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import filecmp
11 import filecmp
12 import os
12 import os
13 import re
13 import re
14 import stat
14 import stat
15
15
16 from .i18n import _
16 from .i18n import _
17 from .node import (
17 from .node import (
18 addednodeid,
18 addednodeid,
19 bin,
19 bin,
20 hex,
20 hex,
21 modifiednodeid,
21 modifiednodeid,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirnodes,
26 wdirnodes,
27 wdirrev,
27 wdirrev,
28 )
28 )
29 from . import (
29 from . import (
30 dagop,
30 dagop,
31 encoding,
31 encoding,
32 error,
32 error,
33 fileset,
33 fileset,
34 match as matchmod,
34 match as matchmod,
35 obsolete as obsmod,
35 obsolete as obsmod,
36 obsutil,
36 obsutil,
37 patch,
37 patch,
38 pathutil,
38 pathutil,
39 phases,
39 phases,
40 pycompat,
40 pycompat,
41 repoview,
41 repoview,
42 revlog,
42 revlog,
43 scmutil,
43 scmutil,
44 sparse,
44 sparse,
45 subrepo,
45 subrepo,
46 subrepoutil,
46 subrepoutil,
47 util,
47 util,
48 )
48 )
49 from .utils import (
49 from .utils import (
50 dateutil,
50 dateutil,
51 stringutil,
51 stringutil,
52 )
52 )
53
53
54 propertycache = util.propertycache
54 propertycache = util.propertycache
55
55
56 nonascii = re.compile(br'[^\x21-\x7f]').search
56 nonascii = re.compile(br'[^\x21-\x7f]').search
57
57
58 class basectx(object):
58 class basectx(object):
59 """A basectx object represents the common logic for its children:
59 """A basectx object represents the common logic for its children:
60 changectx: read-only context that is already present in the repo,
60 changectx: read-only context that is already present in the repo,
61 workingctx: a context that represents the working directory and can
61 workingctx: a context that represents the working directory and can
62 be committed,
62 be committed,
63 memctx: a context that represents changes in-memory and can also
63 memctx: a context that represents changes in-memory and can also
64 be committed."""
64 be committed."""
65
65
66 def __init__(self, repo):
66 def __init__(self, repo):
67 self._repo = repo
67 self._repo = repo
68
68
69 def __bytes__(self):
69 def __bytes__(self):
70 return short(self.node())
70 return short(self.node())
71
71
72 __str__ = encoding.strmethod(__bytes__)
72 __str__ = encoding.strmethod(__bytes__)
73
73
74 def __repr__(self):
74 def __repr__(self):
75 return r"<%s %s>" % (type(self).__name__, str(self))
75 return r"<%s %s>" % (type(self).__name__, str(self))
76
76
77 def __eq__(self, other):
77 def __eq__(self, other):
78 try:
78 try:
79 return type(self) == type(other) and self._rev == other._rev
79 return type(self) == type(other) and self._rev == other._rev
80 except AttributeError:
80 except AttributeError:
81 return False
81 return False
82
82
83 def __ne__(self, other):
83 def __ne__(self, other):
84 return not (self == other)
84 return not (self == other)
85
85
86 def __contains__(self, key):
86 def __contains__(self, key):
87 return key in self._manifest
87 return key in self._manifest
88
88
89 def __getitem__(self, key):
89 def __getitem__(self, key):
90 return self.filectx(key)
90 return self.filectx(key)
91
91
92 def __iter__(self):
92 def __iter__(self):
93 return iter(self._manifest)
93 return iter(self._manifest)
94
94
95 def _buildstatusmanifest(self, status):
95 def _buildstatusmanifest(self, status):
96 """Builds a manifest that includes the given status results, if this is
96 """Builds a manifest that includes the given status results, if this is
97 a working copy context. For non-working copy contexts, it just returns
97 a working copy context. For non-working copy contexts, it just returns
98 the normal manifest."""
98 the normal manifest."""
99 return self.manifest()
99 return self.manifest()
100
100
101 def _matchstatus(self, other, match):
101 def _matchstatus(self, other, match):
102 """This internal method provides a way for child objects to override the
102 """This internal method provides a way for child objects to override the
103 match operator.
103 match operator.
104 """
104 """
105 return match
105 return match
106
106
107 def _buildstatus(self, other, s, match, listignored, listclean,
107 def _buildstatus(self, other, s, match, listignored, listclean,
108 listunknown):
108 listunknown):
109 """build a status with respect to another context"""
109 """build a status with respect to another context"""
110 # Load earliest manifest first for caching reasons. More specifically,
110 # Load earliest manifest first for caching reasons. More specifically,
111 # if you have revisions 1000 and 1001, 1001 is probably stored as a
111 # if you have revisions 1000 and 1001, 1001 is probably stored as a
112 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
112 # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
113 # 1000 and cache it so that when you read 1001, we just need to apply a
113 # 1000 and cache it so that when you read 1001, we just need to apply a
114 # delta to what's in the cache. So that's one full reconstruction + one
114 # delta to what's in the cache. So that's one full reconstruction + one
115 # delta application.
115 # delta application.
116 mf2 = None
116 mf2 = None
117 if self.rev() is not None and self.rev() < other.rev():
117 if self.rev() is not None and self.rev() < other.rev():
118 mf2 = self._buildstatusmanifest(s)
118 mf2 = self._buildstatusmanifest(s)
119 mf1 = other._buildstatusmanifest(s)
119 mf1 = other._buildstatusmanifest(s)
120 if mf2 is None:
120 if mf2 is None:
121 mf2 = self._buildstatusmanifest(s)
121 mf2 = self._buildstatusmanifest(s)
122
122
123 modified, added = [], []
123 modified, added = [], []
124 removed = []
124 removed = []
125 clean = []
125 clean = []
126 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
126 deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
127 deletedset = set(deleted)
127 deletedset = set(deleted)
128 d = mf1.diff(mf2, match=match, clean=listclean)
128 d = mf1.diff(mf2, match=match, clean=listclean)
129 for fn, value in d.iteritems():
129 for fn, value in d.iteritems():
130 if fn in deletedset:
130 if fn in deletedset:
131 continue
131 continue
132 if value is None:
132 if value is None:
133 clean.append(fn)
133 clean.append(fn)
134 continue
134 continue
135 (node1, flag1), (node2, flag2) = value
135 (node1, flag1), (node2, flag2) = value
136 if node1 is None:
136 if node1 is None:
137 added.append(fn)
137 added.append(fn)
138 elif node2 is None:
138 elif node2 is None:
139 removed.append(fn)
139 removed.append(fn)
140 elif flag1 != flag2:
140 elif flag1 != flag2:
141 modified.append(fn)
141 modified.append(fn)
142 elif node2 not in wdirnodes:
142 elif node2 not in wdirnodes:
143 # When comparing files between two commits, we save time by
143 # When comparing files between two commits, we save time by
144 # not comparing the file contents when the nodeids differ.
144 # not comparing the file contents when the nodeids differ.
145 # Note that this means we incorrectly report a reverted change
145 # Note that this means we incorrectly report a reverted change
146 # to a file as a modification.
146 # to a file as a modification.
147 modified.append(fn)
147 modified.append(fn)
148 elif self[fn].cmp(other[fn]):
148 elif self[fn].cmp(other[fn]):
149 modified.append(fn)
149 modified.append(fn)
150 else:
150 else:
151 clean.append(fn)
151 clean.append(fn)
152
152
153 if removed:
153 if removed:
154 # need to filter files if they are already reported as removed
154 # need to filter files if they are already reported as removed
155 unknown = [fn for fn in unknown if fn not in mf1 and
155 unknown = [fn for fn in unknown if fn not in mf1 and
156 (not match or match(fn))]
156 (not match or match(fn))]
157 ignored = [fn for fn in ignored if fn not in mf1 and
157 ignored = [fn for fn in ignored if fn not in mf1 and
158 (not match or match(fn))]
158 (not match or match(fn))]
159 # if they're deleted, don't report them as removed
159 # if they're deleted, don't report them as removed
160 removed = [fn for fn in removed if fn not in deletedset]
160 removed = [fn for fn in removed if fn not in deletedset]
161
161
162 return scmutil.status(modified, added, removed, deleted, unknown,
162 return scmutil.status(modified, added, removed, deleted, unknown,
163 ignored, clean)
163 ignored, clean)
164
164
165 @propertycache
165 @propertycache
166 def substate(self):
166 def substate(self):
167 return subrepoutil.state(self, self._repo.ui)
167 return subrepoutil.state(self, self._repo.ui)
168
168
169 def subrev(self, subpath):
169 def subrev(self, subpath):
170 return self.substate[subpath][1]
170 return self.substate[subpath][1]
171
171
172 def rev(self):
172 def rev(self):
173 return self._rev
173 return self._rev
174 def node(self):
174 def node(self):
175 return self._node
175 return self._node
176 def hex(self):
176 def hex(self):
177 return hex(self.node())
177 return hex(self.node())
178 def manifest(self):
178 def manifest(self):
179 return self._manifest
179 return self._manifest
180 def manifestctx(self):
180 def manifestctx(self):
181 return self._manifestctx
181 return self._manifestctx
182 def repo(self):
182 def repo(self):
183 return self._repo
183 return self._repo
184 def phasestr(self):
184 def phasestr(self):
185 return phases.phasenames[self.phase()]
185 return phases.phasenames[self.phase()]
186 def mutable(self):
186 def mutable(self):
187 return self.phase() > phases.public
187 return self.phase() > phases.public
188
188
189 def getfileset(self, expr):
189 def getfileset(self, expr):
190 return fileset.getfileset(self, expr)
190 return fileset.getfileset(self, expr)
191
191
192 def obsolete(self):
192 def obsolete(self):
193 """True if the changeset is obsolete"""
193 """True if the changeset is obsolete"""
194 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
194 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
195
195
196 def extinct(self):
196 def extinct(self):
197 """True if the changeset is extinct"""
197 """True if the changeset is extinct"""
198 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
198 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
199
199
200 def orphan(self):
200 def orphan(self):
201 """True if the changeset is not obsolete but it's ancestor are"""
201 """True if the changeset is not obsolete but it's ancestor are"""
202 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
202 return self.rev() in obsmod.getrevs(self._repo, 'orphan')
203
203
204 def phasedivergent(self):
204 def phasedivergent(self):
205 """True if the changeset try to be a successor of a public changeset
205 """True if the changeset try to be a successor of a public changeset
206
206
207 Only non-public and non-obsolete changesets may be bumped.
207 Only non-public and non-obsolete changesets may be bumped.
208 """
208 """
209 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
209 return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')
210
210
211 def contentdivergent(self):
211 def contentdivergent(self):
212 """Is a successors of a changeset with multiple possible successors set
212 """Is a successors of a changeset with multiple possible successors set
213
213
214 Only non-public and non-obsolete changesets may be divergent.
214 Only non-public and non-obsolete changesets may be divergent.
215 """
215 """
216 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
216 return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')
217
217
218 def isunstable(self):
218 def isunstable(self):
219 """True if the changeset is either unstable, bumped or divergent"""
219 """True if the changeset is either unstable, bumped or divergent"""
220 return self.orphan() or self.phasedivergent() or self.contentdivergent()
220 return self.orphan() or self.phasedivergent() or self.contentdivergent()
221
221
222 def instabilities(self):
222 def instabilities(self):
223 """return the list of instabilities affecting this changeset.
223 """return the list of instabilities affecting this changeset.
224
224
225 Instabilities are returned as strings. possible values are:
225 Instabilities are returned as strings. possible values are:
226 - orphan,
226 - orphan,
227 - phase-divergent,
227 - phase-divergent,
228 - content-divergent.
228 - content-divergent.
229 """
229 """
230 instabilities = []
230 instabilities = []
231 if self.orphan():
231 if self.orphan():
232 instabilities.append('orphan')
232 instabilities.append('orphan')
233 if self.phasedivergent():
233 if self.phasedivergent():
234 instabilities.append('phase-divergent')
234 instabilities.append('phase-divergent')
235 if self.contentdivergent():
235 if self.contentdivergent():
236 instabilities.append('content-divergent')
236 instabilities.append('content-divergent')
237 return instabilities
237 return instabilities
238
238
239 def parents(self):
239 def parents(self):
240 """return contexts for each parent changeset"""
240 """return contexts for each parent changeset"""
241 return self._parents
241 return self._parents
242
242
243 def p1(self):
243 def p1(self):
244 return self._parents[0]
244 return self._parents[0]
245
245
246 def p2(self):
246 def p2(self):
247 parents = self._parents
247 parents = self._parents
248 if len(parents) == 2:
248 if len(parents) == 2:
249 return parents[1]
249 return parents[1]
250 return changectx(self._repo, nullrev)
250 return changectx(self._repo, nullrev)
251
251
252 def _fileinfo(self, path):
252 def _fileinfo(self, path):
253 if r'_manifest' in self.__dict__:
253 if r'_manifest' in self.__dict__:
254 try:
254 try:
255 return self._manifest[path], self._manifest.flags(path)
255 return self._manifest[path], self._manifest.flags(path)
256 except KeyError:
256 except KeyError:
257 raise error.ManifestLookupError(self._node, path,
257 raise error.ManifestLookupError(self._node, path,
258 _('not found in manifest'))
258 _('not found in manifest'))
259 if r'_manifestdelta' in self.__dict__ or path in self.files():
259 if r'_manifestdelta' in self.__dict__ or path in self.files():
260 if path in self._manifestdelta:
260 if path in self._manifestdelta:
261 return (self._manifestdelta[path],
261 return (self._manifestdelta[path],
262 self._manifestdelta.flags(path))
262 self._manifestdelta.flags(path))
263 mfl = self._repo.manifestlog
263 mfl = self._repo.manifestlog
264 try:
264 try:
265 node, flag = mfl[self._changeset.manifest].find(path)
265 node, flag = mfl[self._changeset.manifest].find(path)
266 except KeyError:
266 except KeyError:
267 raise error.ManifestLookupError(self._node, path,
267 raise error.ManifestLookupError(self._node, path,
268 _('not found in manifest'))
268 _('not found in manifest'))
269
269
270 return node, flag
270 return node, flag
271
271
272 def filenode(self, path):
272 def filenode(self, path):
273 return self._fileinfo(path)[0]
273 return self._fileinfo(path)[0]
274
274
275 def flags(self, path):
275 def flags(self, path):
276 try:
276 try:
277 return self._fileinfo(path)[1]
277 return self._fileinfo(path)[1]
278 except error.LookupError:
278 except error.LookupError:
279 return ''
279 return ''
280
280
281 def sub(self, path, allowcreate=True):
281 def sub(self, path, allowcreate=True):
282 '''return a subrepo for the stored revision of path, never wdir()'''
282 '''return a subrepo for the stored revision of path, never wdir()'''
283 return subrepo.subrepo(self, path, allowcreate=allowcreate)
283 return subrepo.subrepo(self, path, allowcreate=allowcreate)
284
284
285 def nullsub(self, path, pctx):
285 def nullsub(self, path, pctx):
286 return subrepo.nullsubrepo(self, path, pctx)
286 return subrepo.nullsubrepo(self, path, pctx)
287
287
288 def workingsub(self, path):
288 def workingsub(self, path):
289 '''return a subrepo for the stored revision, or wdir if this is a wdir
289 '''return a subrepo for the stored revision, or wdir if this is a wdir
290 context.
290 context.
291 '''
291 '''
292 return subrepo.subrepo(self, path, allowwdir=True)
292 return subrepo.subrepo(self, path, allowwdir=True)
293
293
294 def match(self, pats=None, include=None, exclude=None, default='glob',
294 def match(self, pats=None, include=None, exclude=None, default='glob',
295 listsubrepos=False, badfn=None):
295 listsubrepos=False, badfn=None):
296 r = self._repo
296 r = self._repo
297 return matchmod.match(r.root, r.getcwd(), pats,
297 return matchmod.match(r.root, r.getcwd(), pats,
298 include, exclude, default,
298 include, exclude, default,
299 auditor=r.nofsauditor, ctx=self,
299 auditor=r.nofsauditor, ctx=self,
300 listsubrepos=listsubrepos, badfn=badfn)
300 listsubrepos=listsubrepos, badfn=badfn)
301
301
302 def diff(self, ctx2=None, match=None, **opts):
302 def diff(self, ctx2=None, match=None, **opts):
303 """Returns a diff generator for the given contexts and matcher"""
303 """Returns a diff generator for the given contexts and matcher"""
304 if ctx2 is None:
304 if ctx2 is None:
305 ctx2 = self.p1()
305 ctx2 = self.p1()
306 if ctx2 is not None:
306 if ctx2 is not None:
307 ctx2 = self._repo[ctx2]
307 ctx2 = self._repo[ctx2]
308 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
308 diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
309 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
309 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
310
310
311 def dirs(self):
311 def dirs(self):
312 return self._manifest.dirs()
312 return self._manifest.dirs()
313
313
314 def hasdir(self, dir):
314 def hasdir(self, dir):
315 return self._manifest.hasdir(dir)
315 return self._manifest.hasdir(dir)
316
316
317 def status(self, other=None, match=None, listignored=False,
317 def status(self, other=None, match=None, listignored=False,
318 listclean=False, listunknown=False, listsubrepos=False):
318 listclean=False, listunknown=False, listsubrepos=False):
319 """return status of files between two nodes or node and working
319 """return status of files between two nodes or node and working
320 directory.
320 directory.
321
321
322 If other is None, compare this node with working directory.
322 If other is None, compare this node with working directory.
323
323
324 returns (modified, added, removed, deleted, unknown, ignored, clean)
324 returns (modified, added, removed, deleted, unknown, ignored, clean)
325 """
325 """
326
326
327 ctx1 = self
327 ctx1 = self
328 ctx2 = self._repo[other]
328 ctx2 = self._repo[other]
329
329
330 # This next code block is, admittedly, fragile logic that tests for
330 # This next code block is, admittedly, fragile logic that tests for
331 # reversing the contexts and wouldn't need to exist if it weren't for
331 # reversing the contexts and wouldn't need to exist if it weren't for
332 # the fast (and common) code path of comparing the working directory
332 # the fast (and common) code path of comparing the working directory
333 # with its first parent.
333 # with its first parent.
334 #
334 #
335 # What we're aiming for here is the ability to call:
335 # What we're aiming for here is the ability to call:
336 #
336 #
337 # workingctx.status(parentctx)
337 # workingctx.status(parentctx)
338 #
338 #
339 # If we always built the manifest for each context and compared those,
339 # If we always built the manifest for each context and compared those,
340 # then we'd be done. But the special case of the above call means we
340 # then we'd be done. But the special case of the above call means we
341 # just copy the manifest of the parent.
341 # just copy the manifest of the parent.
342 reversed = False
342 reversed = False
343 if (not isinstance(ctx1, changectx)
343 if (not isinstance(ctx1, changectx)
344 and isinstance(ctx2, changectx)):
344 and isinstance(ctx2, changectx)):
345 reversed = True
345 reversed = True
346 ctx1, ctx2 = ctx2, ctx1
346 ctx1, ctx2 = ctx2, ctx1
347
347
348 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
348 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
349 match = ctx2._matchstatus(ctx1, match)
349 match = ctx2._matchstatus(ctx1, match)
350 r = scmutil.status([], [], [], [], [], [], [])
350 r = scmutil.status([], [], [], [], [], [], [])
351 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
351 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
352 listunknown)
352 listunknown)
353
353
354 if reversed:
354 if reversed:
355 # Reverse added and removed. Clear deleted, unknown and ignored as
355 # Reverse added and removed. Clear deleted, unknown and ignored as
356 # these make no sense to reverse.
356 # these make no sense to reverse.
357 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
357 r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
358 r.clean)
358 r.clean)
359
359
360 if listsubrepos:
360 if listsubrepos:
361 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
361 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
362 try:
362 try:
363 rev2 = ctx2.subrev(subpath)
363 rev2 = ctx2.subrev(subpath)
364 except KeyError:
364 except KeyError:
365 # A subrepo that existed in node1 was deleted between
365 # A subrepo that existed in node1 was deleted between
366 # node1 and node2 (inclusive). Thus, ctx2's substate
366 # node1 and node2 (inclusive). Thus, ctx2's substate
367 # won't contain that subpath. The best we can do ignore it.
367 # won't contain that subpath. The best we can do ignore it.
368 rev2 = None
368 rev2 = None
369 submatch = matchmod.subdirmatcher(subpath, match)
369 submatch = matchmod.subdirmatcher(subpath, match)
370 s = sub.status(rev2, match=submatch, ignored=listignored,
370 s = sub.status(rev2, match=submatch, ignored=listignored,
371 clean=listclean, unknown=listunknown,
371 clean=listclean, unknown=listunknown,
372 listsubrepos=True)
372 listsubrepos=True)
373 for rfiles, sfiles in zip(r, s):
373 for rfiles, sfiles in zip(r, s):
374 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
374 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
375
375
376 for l in r:
376 for l in r:
377 l.sort()
377 l.sort()
378
378
379 return r
379 return r
380
380
381 def _filterederror(repo, changeid):
381 def _filterederror(repo, changeid):
382 """build an exception to be raised about a filtered changeid
382 """build an exception to be raised about a filtered changeid
383
383
384 This is extracted in a function to help extensions (eg: evolve) to
384 This is extracted in a function to help extensions (eg: evolve) to
385 experiment with various message variants."""
385 experiment with various message variants."""
386 if repo.filtername.startswith('visible'):
386 if repo.filtername.startswith('visible'):
387
387
388 # Check if the changeset is obsolete
388 # Check if the changeset is obsolete
389 unfilteredrepo = repo.unfiltered()
389 unfilteredrepo = repo.unfiltered()
390 ctx = unfilteredrepo[changeid]
390 ctx = unfilteredrepo[changeid]
391
391
392 # If the changeset is obsolete, enrich the message with the reason
392 # If the changeset is obsolete, enrich the message with the reason
393 # that made this changeset not visible
393 # that made this changeset not visible
394 if ctx.obsolete():
394 if ctx.obsolete():
395 msg = obsutil._getfilteredreason(repo, changeid, ctx)
395 msg = obsutil._getfilteredreason(repo, changeid, ctx)
396 else:
396 else:
397 msg = _("hidden revision '%s'") % changeid
397 msg = _("hidden revision '%s'") % changeid
398
398
399 hint = _('use --hidden to access hidden revisions')
399 hint = _('use --hidden to access hidden revisions')
400
400
401 return error.FilteredRepoLookupError(msg, hint=hint)
401 return error.FilteredRepoLookupError(msg, hint=hint)
402 msg = _("filtered revision '%s' (not in '%s' subset)")
402 msg = _("filtered revision '%s' (not in '%s' subset)")
403 msg %= (changeid, repo.filtername)
403 msg %= (changeid, repo.filtername)
404 return error.FilteredRepoLookupError(msg)
404 return error.FilteredRepoLookupError(msg)
405
405
406 class changectx(basectx):
406 class changectx(basectx):
407 """A changecontext object makes access to data related to a particular
407 """A changecontext object makes access to data related to a particular
408 changeset convenient. It represents a read-only context already present in
408 changeset convenient. It represents a read-only context already present in
409 the repo."""
409 the repo."""
410 def __init__(self, repo, changeid='.'):
410 def __init__(self, repo, changeid='.'):
411 """changeid is a revision number, node, or tag"""
411 """changeid is a revision number, node, or tag"""
412 super(changectx, self).__init__(repo)
412 super(changectx, self).__init__(repo)
413
413
414 if changeid == '':
414 if changeid == '':
415 changeid = '.'
415 changeid = '.'
416
416
417 try:
417 try:
418 if isinstance(changeid, int):
418 if isinstance(changeid, int):
419 self._node = repo.changelog.node(changeid)
419 self._node = repo.changelog.node(changeid)
420 self._rev = changeid
420 self._rev = changeid
421 return
421 return
422 if not pycompat.ispy3 and isinstance(changeid, long):
423 changeid = "%d" % changeid
424 if changeid == 'null':
422 if changeid == 'null':
425 self._node = nullid
423 self._node = nullid
426 self._rev = nullrev
424 self._rev = nullrev
427 return
425 return
428 if changeid == 'tip':
426 if changeid == 'tip':
429 self._node = repo.changelog.tip()
427 self._node = repo.changelog.tip()
430 self._rev = repo.changelog.rev(self._node)
428 self._rev = repo.changelog.rev(self._node)
431 return
429 return
432 if (changeid == '.'
430 if (changeid == '.'
433 or repo.local() and changeid == repo.dirstate.p1()):
431 or repo.local() and changeid == repo.dirstate.p1()):
434 # this is a hack to delay/avoid loading obsmarkers
432 # this is a hack to delay/avoid loading obsmarkers
435 # when we know that '.' won't be hidden
433 # when we know that '.' won't be hidden
436 self._node = repo.dirstate.p1()
434 self._node = repo.dirstate.p1()
437 self._rev = repo.unfiltered().changelog.rev(self._node)
435 self._rev = repo.unfiltered().changelog.rev(self._node)
438 return
436 return
439 if len(changeid) == 20:
437 if len(changeid) == 20:
440 try:
438 try:
441 self._node = changeid
439 self._node = changeid
442 self._rev = repo.changelog.rev(changeid)
440 self._rev = repo.changelog.rev(changeid)
443 return
441 return
444 except error.FilteredRepoLookupError:
442 except error.FilteredRepoLookupError:
445 raise
443 raise
446 except LookupError:
444 except LookupError:
447 pass
445 pass
448
446
449 try:
447 try:
450 r = int(changeid)
448 r = int(changeid)
451 if '%d' % r != changeid:
449 if '%d' % r != changeid:
452 raise ValueError
450 raise ValueError
453 l = len(repo.changelog)
451 l = len(repo.changelog)
454 if r < 0:
452 if r < 0:
455 r += l
453 r += l
456 if r < 0 or r >= l and r != wdirrev:
454 if r < 0 or r >= l and r != wdirrev:
457 raise ValueError
455 raise ValueError
458 self._rev = r
456 self._rev = r
459 self._node = repo.changelog.node(r)
457 self._node = repo.changelog.node(r)
460 return
458 return
461 except error.FilteredIndexError:
459 except error.FilteredIndexError:
462 raise
460 raise
463 except (ValueError, OverflowError, IndexError):
461 except (ValueError, OverflowError, IndexError):
464 pass
462 pass
465
463
466 if len(changeid) == 40:
464 if len(changeid) == 40:
467 try:
465 try:
468 self._node = bin(changeid)
466 self._node = bin(changeid)
469 self._rev = repo.changelog.rev(self._node)
467 self._rev = repo.changelog.rev(self._node)
470 return
468 return
471 except error.FilteredLookupError:
469 except error.FilteredLookupError:
472 raise
470 raise
473 except (TypeError, LookupError):
471 except (TypeError, LookupError):
474 pass
472 pass
475
473
476 # lookup bookmarks through the name interface
474 # lookup bookmarks through the name interface
477 try:
475 try:
478 self._node = repo.names.singlenode(repo, changeid)
476 self._node = repo.names.singlenode(repo, changeid)
479 self._rev = repo.changelog.rev(self._node)
477 self._rev = repo.changelog.rev(self._node)
480 return
478 return
481 except KeyError:
479 except KeyError:
482 pass
480 pass
483 except error.FilteredRepoLookupError:
481 except error.FilteredRepoLookupError:
484 raise
482 raise
485 except error.RepoLookupError:
483 except error.RepoLookupError:
486 pass
484 pass
487
485
488 self._node = repo.unfiltered().changelog._partialmatch(changeid)
486 self._node = repo.unfiltered().changelog._partialmatch(changeid)
489 if self._node is not None:
487 if self._node is not None:
490 self._rev = repo.changelog.rev(self._node)
488 self._rev = repo.changelog.rev(self._node)
491 return
489 return
492
490
493 # lookup failed
491 # lookup failed
494 # check if it might have come from damaged dirstate
492 # check if it might have come from damaged dirstate
495 #
493 #
496 # XXX we could avoid the unfiltered if we had a recognizable
494 # XXX we could avoid the unfiltered if we had a recognizable
497 # exception for filtered changeset access
495 # exception for filtered changeset access
498 if (repo.local()
496 if (repo.local()
499 and changeid in repo.unfiltered().dirstate.parents()):
497 and changeid in repo.unfiltered().dirstate.parents()):
500 msg = _("working directory has unknown parent '%s'!")
498 msg = _("working directory has unknown parent '%s'!")
501 raise error.Abort(msg % short(changeid))
499 raise error.Abort(msg % short(changeid))
502 try:
500 try:
503 if len(changeid) == 20 and nonascii(changeid):
501 if len(changeid) == 20 and nonascii(changeid):
504 changeid = hex(changeid)
502 changeid = hex(changeid)
505 except TypeError:
503 except TypeError:
506 pass
504 pass
507 except (error.FilteredIndexError, error.FilteredLookupError,
505 except (error.FilteredIndexError, error.FilteredLookupError,
508 error.FilteredRepoLookupError):
506 error.FilteredRepoLookupError):
509 raise _filterederror(repo, changeid)
507 raise _filterederror(repo, changeid)
510 except IndexError:
508 except IndexError:
511 pass
509 pass
512 raise error.RepoLookupError(
510 raise error.RepoLookupError(
513 _("unknown revision '%s'") % changeid)
511 _("unknown revision '%s'") % changeid)
514
512
515 def __hash__(self):
513 def __hash__(self):
516 try:
514 try:
517 return hash(self._rev)
515 return hash(self._rev)
518 except AttributeError:
516 except AttributeError:
519 return id(self)
517 return id(self)
520
518
521 def __nonzero__(self):
519 def __nonzero__(self):
522 return self._rev != nullrev
520 return self._rev != nullrev
523
521
524 __bool__ = __nonzero__
522 __bool__ = __nonzero__
525
523
526 @propertycache
524 @propertycache
527 def _changeset(self):
525 def _changeset(self):
528 return self._repo.changelog.changelogrevision(self.rev())
526 return self._repo.changelog.changelogrevision(self.rev())
529
527
530 @propertycache
528 @propertycache
531 def _manifest(self):
529 def _manifest(self):
532 return self._manifestctx.read()
530 return self._manifestctx.read()
533
531
534 @property
532 @property
535 def _manifestctx(self):
533 def _manifestctx(self):
536 return self._repo.manifestlog[self._changeset.manifest]
534 return self._repo.manifestlog[self._changeset.manifest]
537
535
538 @propertycache
536 @propertycache
539 def _manifestdelta(self):
537 def _manifestdelta(self):
540 return self._manifestctx.readdelta()
538 return self._manifestctx.readdelta()
541
539
542 @propertycache
540 @propertycache
543 def _parents(self):
541 def _parents(self):
544 repo = self._repo
542 repo = self._repo
545 p1, p2 = repo.changelog.parentrevs(self._rev)
543 p1, p2 = repo.changelog.parentrevs(self._rev)
546 if p2 == nullrev:
544 if p2 == nullrev:
547 return [changectx(repo, p1)]
545 return [changectx(repo, p1)]
548 return [changectx(repo, p1), changectx(repo, p2)]
546 return [changectx(repo, p1), changectx(repo, p2)]
549
547
550 def changeset(self):
548 def changeset(self):
551 c = self._changeset
549 c = self._changeset
552 return (
550 return (
553 c.manifest,
551 c.manifest,
554 c.user,
552 c.user,
555 c.date,
553 c.date,
556 c.files,
554 c.files,
557 c.description,
555 c.description,
558 c.extra,
556 c.extra,
559 )
557 )
560 def manifestnode(self):
558 def manifestnode(self):
561 return self._changeset.manifest
559 return self._changeset.manifest
562
560
563 def user(self):
561 def user(self):
564 return self._changeset.user
562 return self._changeset.user
565 def date(self):
563 def date(self):
566 return self._changeset.date
564 return self._changeset.date
567 def files(self):
565 def files(self):
568 return self._changeset.files
566 return self._changeset.files
569 def description(self):
567 def description(self):
570 return self._changeset.description
568 return self._changeset.description
571 def branch(self):
569 def branch(self):
572 return encoding.tolocal(self._changeset.extra.get("branch"))
570 return encoding.tolocal(self._changeset.extra.get("branch"))
573 def closesbranch(self):
571 def closesbranch(self):
574 return 'close' in self._changeset.extra
572 return 'close' in self._changeset.extra
575 def extra(self):
573 def extra(self):
576 """Return a dict of extra information."""
574 """Return a dict of extra information."""
577 return self._changeset.extra
575 return self._changeset.extra
578 def tags(self):
576 def tags(self):
579 """Return a list of byte tag names"""
577 """Return a list of byte tag names"""
580 return self._repo.nodetags(self._node)
578 return self._repo.nodetags(self._node)
581 def bookmarks(self):
579 def bookmarks(self):
582 """Return a list of byte bookmark names."""
580 """Return a list of byte bookmark names."""
583 return self._repo.nodebookmarks(self._node)
581 return self._repo.nodebookmarks(self._node)
584 def phase(self):
582 def phase(self):
585 return self._repo._phasecache.phase(self._repo, self._rev)
583 return self._repo._phasecache.phase(self._repo, self._rev)
586 def hidden(self):
584 def hidden(self):
587 return self._rev in repoview.filterrevs(self._repo, 'visible')
585 return self._rev in repoview.filterrevs(self._repo, 'visible')
588
586
589 def isinmemory(self):
587 def isinmemory(self):
590 return False
588 return False
591
589
592 def children(self):
590 def children(self):
593 """return list of changectx contexts for each child changeset.
591 """return list of changectx contexts for each child changeset.
594
592
595 This returns only the immediate child changesets. Use descendants() to
593 This returns only the immediate child changesets. Use descendants() to
596 recursively walk children.
594 recursively walk children.
597 """
595 """
598 c = self._repo.changelog.children(self._node)
596 c = self._repo.changelog.children(self._node)
599 return [changectx(self._repo, x) for x in c]
597 return [changectx(self._repo, x) for x in c]
600
598
601 def ancestors(self):
599 def ancestors(self):
602 for a in self._repo.changelog.ancestors([self._rev]):
600 for a in self._repo.changelog.ancestors([self._rev]):
603 yield changectx(self._repo, a)
601 yield changectx(self._repo, a)
604
602
605 def descendants(self):
603 def descendants(self):
606 """Recursively yield all children of the changeset.
604 """Recursively yield all children of the changeset.
607
605
608 For just the immediate children, use children()
606 For just the immediate children, use children()
609 """
607 """
610 for d in self._repo.changelog.descendants([self._rev]):
608 for d in self._repo.changelog.descendants([self._rev]):
611 yield changectx(self._repo, d)
609 yield changectx(self._repo, d)
612
610
613 def filectx(self, path, fileid=None, filelog=None):
611 def filectx(self, path, fileid=None, filelog=None):
614 """get a file context from this changeset"""
612 """get a file context from this changeset"""
615 if fileid is None:
613 if fileid is None:
616 fileid = self.filenode(path)
614 fileid = self.filenode(path)
617 return filectx(self._repo, path, fileid=fileid,
615 return filectx(self._repo, path, fileid=fileid,
618 changectx=self, filelog=filelog)
616 changectx=self, filelog=filelog)
619
617
620 def ancestor(self, c2, warn=False):
618 def ancestor(self, c2, warn=False):
621 """return the "best" ancestor context of self and c2
619 """return the "best" ancestor context of self and c2
622
620
623 If there are multiple candidates, it will show a message and check
621 If there are multiple candidates, it will show a message and check
624 merge.preferancestor configuration before falling back to the
622 merge.preferancestor configuration before falling back to the
625 revlog ancestor."""
623 revlog ancestor."""
626 # deal with workingctxs
624 # deal with workingctxs
627 n2 = c2._node
625 n2 = c2._node
628 if n2 is None:
626 if n2 is None:
629 n2 = c2._parents[0]._node
627 n2 = c2._parents[0]._node
630 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
628 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
631 if not cahs:
629 if not cahs:
632 anc = nullid
630 anc = nullid
633 elif len(cahs) == 1:
631 elif len(cahs) == 1:
634 anc = cahs[0]
632 anc = cahs[0]
635 else:
633 else:
636 # experimental config: merge.preferancestor
634 # experimental config: merge.preferancestor
637 for r in self._repo.ui.configlist('merge', 'preferancestor'):
635 for r in self._repo.ui.configlist('merge', 'preferancestor'):
638 try:
636 try:
639 ctx = changectx(self._repo, r)
637 ctx = changectx(self._repo, r)
640 except error.RepoLookupError:
638 except error.RepoLookupError:
641 continue
639 continue
642 anc = ctx.node()
640 anc = ctx.node()
643 if anc in cahs:
641 if anc in cahs:
644 break
642 break
645 else:
643 else:
646 anc = self._repo.changelog.ancestor(self._node, n2)
644 anc = self._repo.changelog.ancestor(self._node, n2)
647 if warn:
645 if warn:
648 self._repo.ui.status(
646 self._repo.ui.status(
649 (_("note: using %s as ancestor of %s and %s\n") %
647 (_("note: using %s as ancestor of %s and %s\n") %
650 (short(anc), short(self._node), short(n2))) +
648 (short(anc), short(self._node), short(n2))) +
651 ''.join(_(" alternatively, use --config "
649 ''.join(_(" alternatively, use --config "
652 "merge.preferancestor=%s\n") %
650 "merge.preferancestor=%s\n") %
653 short(n) for n in sorted(cahs) if n != anc))
651 short(n) for n in sorted(cahs) if n != anc))
654 return changectx(self._repo, anc)
652 return changectx(self._repo, anc)
655
653
656 def descendant(self, other):
654 def descendant(self, other):
657 """True if other is descendant of this changeset"""
655 """True if other is descendant of this changeset"""
658 return self._repo.changelog.descendant(self._rev, other._rev)
656 return self._repo.changelog.descendant(self._rev, other._rev)
659
657
660 def walk(self, match):
658 def walk(self, match):
661 '''Generates matching file names.'''
659 '''Generates matching file names.'''
662
660
663 # Wrap match.bad method to have message with nodeid
661 # Wrap match.bad method to have message with nodeid
664 def bad(fn, msg):
662 def bad(fn, msg):
665 # The manifest doesn't know about subrepos, so don't complain about
663 # The manifest doesn't know about subrepos, so don't complain about
666 # paths into valid subrepos.
664 # paths into valid subrepos.
667 if any(fn == s or fn.startswith(s + '/')
665 if any(fn == s or fn.startswith(s + '/')
668 for s in self.substate):
666 for s in self.substate):
669 return
667 return
670 match.bad(fn, _('no such file in rev %s') % self)
668 match.bad(fn, _('no such file in rev %s') % self)
671
669
672 m = matchmod.badmatch(match, bad)
670 m = matchmod.badmatch(match, bad)
673 return self._manifest.walk(m)
671 return self._manifest.walk(m)
674
672
675 def matches(self, match):
673 def matches(self, match):
676 return self.walk(match)
674 return self.walk(match)
677
675
678 class basefilectx(object):
676 class basefilectx(object):
679 """A filecontext object represents the common logic for its children:
677 """A filecontext object represents the common logic for its children:
680 filectx: read-only access to a filerevision that is already present
678 filectx: read-only access to a filerevision that is already present
681 in the repo,
679 in the repo,
682 workingfilectx: a filecontext that represents files from the working
680 workingfilectx: a filecontext that represents files from the working
683 directory,
681 directory,
684 memfilectx: a filecontext that represents files in-memory,
682 memfilectx: a filecontext that represents files in-memory,
685 overlayfilectx: duplicate another filecontext with some fields overridden.
683 overlayfilectx: duplicate another filecontext with some fields overridden.
686 """
684 """
687 @propertycache
685 @propertycache
688 def _filelog(self):
686 def _filelog(self):
689 return self._repo.file(self._path)
687 return self._repo.file(self._path)
690
688
691 @propertycache
689 @propertycache
692 def _changeid(self):
690 def _changeid(self):
693 if r'_changeid' in self.__dict__:
691 if r'_changeid' in self.__dict__:
694 return self._changeid
692 return self._changeid
695 elif r'_changectx' in self.__dict__:
693 elif r'_changectx' in self.__dict__:
696 return self._changectx.rev()
694 return self._changectx.rev()
697 elif r'_descendantrev' in self.__dict__:
695 elif r'_descendantrev' in self.__dict__:
698 # this file context was created from a revision with a known
696 # this file context was created from a revision with a known
699 # descendant, we can (lazily) correct for linkrev aliases
697 # descendant, we can (lazily) correct for linkrev aliases
700 return self._adjustlinkrev(self._descendantrev)
698 return self._adjustlinkrev(self._descendantrev)
701 else:
699 else:
702 return self._filelog.linkrev(self._filerev)
700 return self._filelog.linkrev(self._filerev)
703
701
704 @propertycache
702 @propertycache
705 def _filenode(self):
703 def _filenode(self):
706 if r'_fileid' in self.__dict__:
704 if r'_fileid' in self.__dict__:
707 return self._filelog.lookup(self._fileid)
705 return self._filelog.lookup(self._fileid)
708 else:
706 else:
709 return self._changectx.filenode(self._path)
707 return self._changectx.filenode(self._path)
710
708
711 @propertycache
709 @propertycache
712 def _filerev(self):
710 def _filerev(self):
713 return self._filelog.rev(self._filenode)
711 return self._filelog.rev(self._filenode)
714
712
715 @propertycache
713 @propertycache
716 def _repopath(self):
714 def _repopath(self):
717 return self._path
715 return self._path
718
716
719 def __nonzero__(self):
717 def __nonzero__(self):
720 try:
718 try:
721 self._filenode
719 self._filenode
722 return True
720 return True
723 except error.LookupError:
721 except error.LookupError:
724 # file is missing
722 # file is missing
725 return False
723 return False
726
724
727 __bool__ = __nonzero__
725 __bool__ = __nonzero__
728
726
729 def __bytes__(self):
727 def __bytes__(self):
730 try:
728 try:
731 return "%s@%s" % (self.path(), self._changectx)
729 return "%s@%s" % (self.path(), self._changectx)
732 except error.LookupError:
730 except error.LookupError:
733 return "%s@???" % self.path()
731 return "%s@???" % self.path()
734
732
735 __str__ = encoding.strmethod(__bytes__)
733 __str__ = encoding.strmethod(__bytes__)
736
734
737 def __repr__(self):
735 def __repr__(self):
738 return r"<%s %s>" % (type(self).__name__, str(self))
736 return r"<%s %s>" % (type(self).__name__, str(self))
739
737
740 def __hash__(self):
738 def __hash__(self):
741 try:
739 try:
742 return hash((self._path, self._filenode))
740 return hash((self._path, self._filenode))
743 except AttributeError:
741 except AttributeError:
744 return id(self)
742 return id(self)
745
743
746 def __eq__(self, other):
744 def __eq__(self, other):
747 try:
745 try:
748 return (type(self) == type(other) and self._path == other._path
746 return (type(self) == type(other) and self._path == other._path
749 and self._filenode == other._filenode)
747 and self._filenode == other._filenode)
750 except AttributeError:
748 except AttributeError:
751 return False
749 return False
752
750
753 def __ne__(self, other):
751 def __ne__(self, other):
754 return not (self == other)
752 return not (self == other)
755
753
756 def filerev(self):
754 def filerev(self):
757 return self._filerev
755 return self._filerev
758 def filenode(self):
756 def filenode(self):
759 return self._filenode
757 return self._filenode
760 @propertycache
758 @propertycache
761 def _flags(self):
759 def _flags(self):
762 return self._changectx.flags(self._path)
760 return self._changectx.flags(self._path)
763 def flags(self):
761 def flags(self):
764 return self._flags
762 return self._flags
765 def filelog(self):
763 def filelog(self):
766 return self._filelog
764 return self._filelog
767 def rev(self):
765 def rev(self):
768 return self._changeid
766 return self._changeid
769 def linkrev(self):
767 def linkrev(self):
770 return self._filelog.linkrev(self._filerev)
768 return self._filelog.linkrev(self._filerev)
771 def node(self):
769 def node(self):
772 return self._changectx.node()
770 return self._changectx.node()
773 def hex(self):
771 def hex(self):
774 return self._changectx.hex()
772 return self._changectx.hex()
775 def user(self):
773 def user(self):
776 return self._changectx.user()
774 return self._changectx.user()
777 def date(self):
775 def date(self):
778 return self._changectx.date()
776 return self._changectx.date()
779 def files(self):
777 def files(self):
780 return self._changectx.files()
778 return self._changectx.files()
781 def description(self):
779 def description(self):
782 return self._changectx.description()
780 return self._changectx.description()
783 def branch(self):
781 def branch(self):
784 return self._changectx.branch()
782 return self._changectx.branch()
785 def extra(self):
783 def extra(self):
786 return self._changectx.extra()
784 return self._changectx.extra()
787 def phase(self):
785 def phase(self):
788 return self._changectx.phase()
786 return self._changectx.phase()
789 def phasestr(self):
787 def phasestr(self):
790 return self._changectx.phasestr()
788 return self._changectx.phasestr()
791 def obsolete(self):
789 def obsolete(self):
792 return self._changectx.obsolete()
790 return self._changectx.obsolete()
793 def instabilities(self):
791 def instabilities(self):
794 return self._changectx.instabilities()
792 return self._changectx.instabilities()
795 def manifest(self):
793 def manifest(self):
796 return self._changectx.manifest()
794 return self._changectx.manifest()
797 def changectx(self):
795 def changectx(self):
798 return self._changectx
796 return self._changectx
799 def renamed(self):
797 def renamed(self):
800 return self._copied
798 return self._copied
801 def repo(self):
799 def repo(self):
802 return self._repo
800 return self._repo
803 def size(self):
801 def size(self):
804 return len(self.data())
802 return len(self.data())
805
803
806 def path(self):
804 def path(self):
807 return self._path
805 return self._path
808
806
809 def isbinary(self):
807 def isbinary(self):
810 try:
808 try:
811 return stringutil.binary(self.data())
809 return stringutil.binary(self.data())
812 except IOError:
810 except IOError:
813 return False
811 return False
814 def isexec(self):
812 def isexec(self):
815 return 'x' in self.flags()
813 return 'x' in self.flags()
816 def islink(self):
814 def islink(self):
817 return 'l' in self.flags()
815 return 'l' in self.flags()
818
816
819 def isabsent(self):
817 def isabsent(self):
820 """whether this filectx represents a file not in self._changectx
818 """whether this filectx represents a file not in self._changectx
821
819
822 This is mainly for merge code to detect change/delete conflicts. This is
820 This is mainly for merge code to detect change/delete conflicts. This is
823 expected to be True for all subclasses of basectx."""
821 expected to be True for all subclasses of basectx."""
824 return False
822 return False
825
823
826 _customcmp = False
824 _customcmp = False
827 def cmp(self, fctx):
825 def cmp(self, fctx):
828 """compare with other file context
826 """compare with other file context
829
827
830 returns True if different than fctx.
828 returns True if different than fctx.
831 """
829 """
832 if fctx._customcmp:
830 if fctx._customcmp:
833 return fctx.cmp(self)
831 return fctx.cmp(self)
834
832
835 if (fctx._filenode is None
833 if (fctx._filenode is None
836 and (self._repo._encodefilterpats
834 and (self._repo._encodefilterpats
837 # if file data starts with '\1\n', empty metadata block is
835 # if file data starts with '\1\n', empty metadata block is
838 # prepended, which adds 4 bytes to filelog.size().
836 # prepended, which adds 4 bytes to filelog.size().
839 or self.size() - 4 == fctx.size())
837 or self.size() - 4 == fctx.size())
840 or self.size() == fctx.size()):
838 or self.size() == fctx.size()):
841 return self._filelog.cmp(self._filenode, fctx.data())
839 return self._filelog.cmp(self._filenode, fctx.data())
842
840
843 return True
841 return True
844
842
845 def _adjustlinkrev(self, srcrev, inclusive=False):
843 def _adjustlinkrev(self, srcrev, inclusive=False):
846 """return the first ancestor of <srcrev> introducing <fnode>
844 """return the first ancestor of <srcrev> introducing <fnode>
847
845
848 If the linkrev of the file revision does not point to an ancestor of
846 If the linkrev of the file revision does not point to an ancestor of
849 srcrev, we'll walk down the ancestors until we find one introducing
847 srcrev, we'll walk down the ancestors until we find one introducing
850 this file revision.
848 this file revision.
851
849
852 :srcrev: the changeset revision we search ancestors from
850 :srcrev: the changeset revision we search ancestors from
853 :inclusive: if true, the src revision will also be checked
851 :inclusive: if true, the src revision will also be checked
854 """
852 """
855 repo = self._repo
853 repo = self._repo
856 cl = repo.unfiltered().changelog
854 cl = repo.unfiltered().changelog
857 mfl = repo.manifestlog
855 mfl = repo.manifestlog
858 # fetch the linkrev
856 # fetch the linkrev
859 lkr = self.linkrev()
857 lkr = self.linkrev()
860 # hack to reuse ancestor computation when searching for renames
858 # hack to reuse ancestor computation when searching for renames
861 memberanc = getattr(self, '_ancestrycontext', None)
859 memberanc = getattr(self, '_ancestrycontext', None)
862 iteranc = None
860 iteranc = None
863 if srcrev is None:
861 if srcrev is None:
864 # wctx case, used by workingfilectx during mergecopy
862 # wctx case, used by workingfilectx during mergecopy
865 revs = [p.rev() for p in self._repo[None].parents()]
863 revs = [p.rev() for p in self._repo[None].parents()]
866 inclusive = True # we skipped the real (revless) source
864 inclusive = True # we skipped the real (revless) source
867 else:
865 else:
868 revs = [srcrev]
866 revs = [srcrev]
869 if memberanc is None:
867 if memberanc is None:
870 memberanc = iteranc = cl.ancestors(revs, lkr,
868 memberanc = iteranc = cl.ancestors(revs, lkr,
871 inclusive=inclusive)
869 inclusive=inclusive)
872 # check if this linkrev is an ancestor of srcrev
870 # check if this linkrev is an ancestor of srcrev
873 if lkr not in memberanc:
871 if lkr not in memberanc:
874 if iteranc is None:
872 if iteranc is None:
875 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
873 iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
876 fnode = self._filenode
874 fnode = self._filenode
877 path = self._path
875 path = self._path
878 for a in iteranc:
876 for a in iteranc:
879 ac = cl.read(a) # get changeset data (we avoid object creation)
877 ac = cl.read(a) # get changeset data (we avoid object creation)
880 if path in ac[3]: # checking the 'files' field.
878 if path in ac[3]: # checking the 'files' field.
881 # The file has been touched, check if the content is
879 # The file has been touched, check if the content is
882 # similar to the one we search for.
880 # similar to the one we search for.
883 if fnode == mfl[ac[0]].readfast().get(path):
881 if fnode == mfl[ac[0]].readfast().get(path):
884 return a
882 return a
885 # In theory, we should never get out of that loop without a result.
883 # In theory, we should never get out of that loop without a result.
886 # But if manifest uses a buggy file revision (not children of the
884 # But if manifest uses a buggy file revision (not children of the
887 # one it replaces) we could. Such a buggy situation will likely
885 # one it replaces) we could. Such a buggy situation will likely
888 # result is crash somewhere else at to some point.
886 # result is crash somewhere else at to some point.
889 return lkr
887 return lkr
890
888
891 def introrev(self):
889 def introrev(self):
892 """return the rev of the changeset which introduced this file revision
890 """return the rev of the changeset which introduced this file revision
893
891
894 This method is different from linkrev because it take into account the
892 This method is different from linkrev because it take into account the
895 changeset the filectx was created from. It ensures the returned
893 changeset the filectx was created from. It ensures the returned
896 revision is one of its ancestors. This prevents bugs from
894 revision is one of its ancestors. This prevents bugs from
897 'linkrev-shadowing' when a file revision is used by multiple
895 'linkrev-shadowing' when a file revision is used by multiple
898 changesets.
896 changesets.
899 """
897 """
900 lkr = self.linkrev()
898 lkr = self.linkrev()
901 attrs = vars(self)
899 attrs = vars(self)
902 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
900 noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
903 if noctx or self.rev() == lkr:
901 if noctx or self.rev() == lkr:
904 return self.linkrev()
902 return self.linkrev()
905 return self._adjustlinkrev(self.rev(), inclusive=True)
903 return self._adjustlinkrev(self.rev(), inclusive=True)
906
904
907 def introfilectx(self):
905 def introfilectx(self):
908 """Return filectx having identical contents, but pointing to the
906 """Return filectx having identical contents, but pointing to the
909 changeset revision where this filectx was introduced"""
907 changeset revision where this filectx was introduced"""
910 introrev = self.introrev()
908 introrev = self.introrev()
911 if self.rev() == introrev:
909 if self.rev() == introrev:
912 return self
910 return self
913 return self.filectx(self.filenode(), changeid=introrev)
911 return self.filectx(self.filenode(), changeid=introrev)
914
912
915 def _parentfilectx(self, path, fileid, filelog):
913 def _parentfilectx(self, path, fileid, filelog):
916 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
914 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
917 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
915 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
918 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
916 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
919 # If self is associated with a changeset (probably explicitly
917 # If self is associated with a changeset (probably explicitly
920 # fed), ensure the created filectx is associated with a
918 # fed), ensure the created filectx is associated with a
921 # changeset that is an ancestor of self.changectx.
919 # changeset that is an ancestor of self.changectx.
922 # This lets us later use _adjustlinkrev to get a correct link.
920 # This lets us later use _adjustlinkrev to get a correct link.
923 fctx._descendantrev = self.rev()
921 fctx._descendantrev = self.rev()
924 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
922 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
925 elif r'_descendantrev' in vars(self):
923 elif r'_descendantrev' in vars(self):
926 # Otherwise propagate _descendantrev if we have one associated.
924 # Otherwise propagate _descendantrev if we have one associated.
927 fctx._descendantrev = self._descendantrev
925 fctx._descendantrev = self._descendantrev
928 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
926 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
929 return fctx
927 return fctx
930
928
931 def parents(self):
929 def parents(self):
932 _path = self._path
930 _path = self._path
933 fl = self._filelog
931 fl = self._filelog
934 parents = self._filelog.parents(self._filenode)
932 parents = self._filelog.parents(self._filenode)
935 pl = [(_path, node, fl) for node in parents if node != nullid]
933 pl = [(_path, node, fl) for node in parents if node != nullid]
936
934
937 r = fl.renamed(self._filenode)
935 r = fl.renamed(self._filenode)
938 if r:
936 if r:
939 # - In the simple rename case, both parent are nullid, pl is empty.
937 # - In the simple rename case, both parent are nullid, pl is empty.
940 # - In case of merge, only one of the parent is null id and should
938 # - In case of merge, only one of the parent is null id and should
941 # be replaced with the rename information. This parent is -always-
939 # be replaced with the rename information. This parent is -always-
942 # the first one.
940 # the first one.
943 #
941 #
944 # As null id have always been filtered out in the previous list
942 # As null id have always been filtered out in the previous list
945 # comprehension, inserting to 0 will always result in "replacing
943 # comprehension, inserting to 0 will always result in "replacing
946 # first nullid parent with rename information.
944 # first nullid parent with rename information.
947 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
945 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
948
946
949 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
947 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
950
948
951 def p1(self):
949 def p1(self):
952 return self.parents()[0]
950 return self.parents()[0]
953
951
954 def p2(self):
952 def p2(self):
955 p = self.parents()
953 p = self.parents()
956 if len(p) == 2:
954 if len(p) == 2:
957 return p[1]
955 return p[1]
958 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
956 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
959
957
960 def annotate(self, follow=False, skiprevs=None, diffopts=None):
958 def annotate(self, follow=False, skiprevs=None, diffopts=None):
961 """Returns a list of annotateline objects for each line in the file
959 """Returns a list of annotateline objects for each line in the file
962
960
963 - line.fctx is the filectx of the node where that line was last changed
961 - line.fctx is the filectx of the node where that line was last changed
964 - line.lineno is the line number at the first appearance in the managed
962 - line.lineno is the line number at the first appearance in the managed
965 file
963 file
966 - line.text is the data on that line (including newline character)
964 - line.text is the data on that line (including newline character)
967 """
965 """
968 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
966 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
969
967
970 def parents(f):
968 def parents(f):
971 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
969 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
972 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
970 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
973 # from the topmost introrev (= srcrev) down to p.linkrev() if it
971 # from the topmost introrev (= srcrev) down to p.linkrev() if it
974 # isn't an ancestor of the srcrev.
972 # isn't an ancestor of the srcrev.
975 f._changeid
973 f._changeid
976 pl = f.parents()
974 pl = f.parents()
977
975
978 # Don't return renamed parents if we aren't following.
976 # Don't return renamed parents if we aren't following.
979 if not follow:
977 if not follow:
980 pl = [p for p in pl if p.path() == f.path()]
978 pl = [p for p in pl if p.path() == f.path()]
981
979
982 # renamed filectx won't have a filelog yet, so set it
980 # renamed filectx won't have a filelog yet, so set it
983 # from the cache to save time
981 # from the cache to save time
984 for p in pl:
982 for p in pl:
985 if not r'_filelog' in p.__dict__:
983 if not r'_filelog' in p.__dict__:
986 p._filelog = getlog(p.path())
984 p._filelog = getlog(p.path())
987
985
988 return pl
986 return pl
989
987
990 # use linkrev to find the first changeset where self appeared
988 # use linkrev to find the first changeset where self appeared
991 base = self.introfilectx()
989 base = self.introfilectx()
992 if getattr(base, '_ancestrycontext', None) is None:
990 if getattr(base, '_ancestrycontext', None) is None:
993 cl = self._repo.changelog
991 cl = self._repo.changelog
994 if base.rev() is None:
992 if base.rev() is None:
995 # wctx is not inclusive, but works because _ancestrycontext
993 # wctx is not inclusive, but works because _ancestrycontext
996 # is used to test filelog revisions
994 # is used to test filelog revisions
997 ac = cl.ancestors([p.rev() for p in base.parents()],
995 ac = cl.ancestors([p.rev() for p in base.parents()],
998 inclusive=True)
996 inclusive=True)
999 else:
997 else:
1000 ac = cl.ancestors([base.rev()], inclusive=True)
998 ac = cl.ancestors([base.rev()], inclusive=True)
1001 base._ancestrycontext = ac
999 base._ancestrycontext = ac
1002
1000
1003 return dagop.annotate(base, parents, skiprevs=skiprevs,
1001 return dagop.annotate(base, parents, skiprevs=skiprevs,
1004 diffopts=diffopts)
1002 diffopts=diffopts)
1005
1003
1006 def ancestors(self, followfirst=False):
1004 def ancestors(self, followfirst=False):
1007 visit = {}
1005 visit = {}
1008 c = self
1006 c = self
1009 if followfirst:
1007 if followfirst:
1010 cut = 1
1008 cut = 1
1011 else:
1009 else:
1012 cut = None
1010 cut = None
1013
1011
1014 while True:
1012 while True:
1015 for parent in c.parents()[:cut]:
1013 for parent in c.parents()[:cut]:
1016 visit[(parent.linkrev(), parent.filenode())] = parent
1014 visit[(parent.linkrev(), parent.filenode())] = parent
1017 if not visit:
1015 if not visit:
1018 break
1016 break
1019 c = visit.pop(max(visit))
1017 c = visit.pop(max(visit))
1020 yield c
1018 yield c
1021
1019
1022 def decodeddata(self):
1020 def decodeddata(self):
1023 """Returns `data()` after running repository decoding filters.
1021 """Returns `data()` after running repository decoding filters.
1024
1022
1025 This is often equivalent to how the data would be expressed on disk.
1023 This is often equivalent to how the data would be expressed on disk.
1026 """
1024 """
1027 return self._repo.wwritedata(self.path(), self.data())
1025 return self._repo.wwritedata(self.path(), self.data())
1028
1026
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # At least one way of locating the file revision must be given.
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        # Seed only the caches the caller supplied; the remaining
        # attributes are computed lazily via propertycache.
        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to build
            # `changectx` for filtered revision. In such case we fallback to
            # creating `changectx` on the unfiltered version of the reposition.
            # This fallback should not be an issue because `changectx` from
            # `filectx` are not used in complex operations that care about
            # filtering.
            #
            # This fallback is a cheap and dirty fix that prevent several
            # crashes. It does not ensure the behavior is correct. However the
            # behavior was not correct before filtering either and "incorrect
            # behavior" is seen as better as "crash"
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should be
            # considered when solving linkrev issue are on the table.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def rawdata(self):
        # Revision data without flag (e.g. censor/LFS) processing applied.
        return self._filelog.revision(self._filenode, raw=True)

    def rawflags(self):
        """low-level revlog flags"""
        return self._filelog.flags(self._filerev)

    def data(self):
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            # Honor the censor policy: return empty data when configured to
            # ignore, otherwise abort with a hint.
            if self._repo.ui.config("censor", "policy") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        return self._filelog.size(self._filerev)

    @propertycache
    def _copied(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        # If either parent already has this exact file revision, the rename
        # does not belong to this changeset.
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1134
1132
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(committablectx, self).__init__(repo)
        # No revision/node exists yet; they are assigned at commit time.
        self._rev = None
        self._node = None
        self._text = text
        # Seed only the caches the caller supplied; the rest are computed
        # lazily by the propertycaches below.
        if date:
            self._date = dateutil.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
        # An empty branch name means the default branch.
        if self._extra['branch'] == '':
            self._extra['branch'] = 'default'

    def __bytes__(self):
        # Rendered as the first parent followed by a "+" marker.
        return bytes(self._parents[0]) + "+"

    __str__ = encoding.strmethod(__bytes__)

    def __nonzero__(self):
        # A committable context is never the null revision.
        return True

    __bool__ = __nonzero__
1172
1170
1173 def _buildflagfunc(self):
1171 def _buildflagfunc(self):
1174 # Create a fallback function for getting file flags when the
1172 # Create a fallback function for getting file flags when the
1175 # filesystem doesn't support them
1173 # filesystem doesn't support them
1176
1174
1177 copiesget = self._repo.dirstate.copies().get
1175 copiesget = self._repo.dirstate.copies().get
1178 parents = self.parents()
1176 parents = self.parents()
1179 if len(parents) < 2:
1177 if len(parents) < 2:
1180 # when we have one parent, it's easy: copy from parent
1178 # when we have one parent, it's easy: copy from parent
1181 man = parents[0].manifest()
1179 man = parents[0].manifest()
1182 def func(f):
1180 def func(f):
1183 f = copiesget(f, f)
1181 f = copiesget(f, f)
1184 return man.flags(f)
1182 return man.flags(f)
1185 else:
1183 else:
1186 # merges are tricky: we try to reconstruct the unstored
1184 # merges are tricky: we try to reconstruct the unstored
1187 # result from the merge (issue1802)
1185 # result from the merge (issue1802)
1188 p1, p2 = parents
1186 p1, p2 = parents
1189 pa = p1.ancestor(p2)
1187 pa = p1.ancestor(p2)
1190 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1188 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1191
1189
1192 def func(f):
1190 def func(f):
1193 f = copiesget(f, f) # may be wrong for merges with copies
1191 f = copiesget(f, f) # may be wrong for merges with copies
1194 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1192 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1195 if fl1 == fl2:
1193 if fl1 == fl2:
1196 return fl1
1194 return fl1
1197 if fl1 == fla:
1195 if fl1 == fla:
1198 return fl2
1196 return fl2
1199 if fl2 == fla:
1197 if fl2 == fla:
1200 return fl1
1198 return fl1
1201 return '' # punt for conflicts
1199 return '' # punt for conflicts
1202
1200
1203 return func
1201 return func
1204
1202
    @propertycache
    def _flagfunc(self):
        # Use the dirstate's flag reader; it falls back to the
        # manifest-based reconstruction when the filesystem cannot report
        # flags.
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _status(self):
        # Lazily computed working-copy status (used when the caller did not
        # pass `changes` to __init__).
        return self._repo.status()

    @propertycache
    def _user(self):
        # Default committer: the configured username.
        return self._repo.ui.username()
1216
1214
1217 @propertycache
1215 @propertycache
1218 def _date(self):
1216 def _date(self):
1219 ui = self._repo.ui
1217 ui = self._repo.ui
1220 date = ui.configdate('devel', 'default-date')
1218 date = ui.configdate('devel', 'default-date')
1221 if date is None:
1219 if date is None:
1222 date = dateutil.makedate()
1220 date = dateutil.makedate()
1223 return date
1221 return date
1224
1222
    def subrev(self, subpath):
        # An uncommitted context has no recorded subrepo revision.
        return None

    def manifestnode(self):
        # No manifest has been written yet.
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
1236 def files(self):
1234 def files(self):
1237 return sorted(self._status.modified + self._status.added +
1235 return sorted(self._status.modified + self._status.added +
1238 self._status.removed)
1236 self._status.removed)
1239
1237
    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        # Branch name is stored in extra as UTF-8; convert for local use.
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def isinmemory(self):
        return False

    def tags(self):
        # An uncommitted context cannot carry tags.
        return []
1260
1258
1261 def bookmarks(self):
1259 def bookmarks(self):
1262 b = []
1260 b = []
1263 for p in self.parents():
1261 for p in self.parents():
1264 b.extend(p.bookmarks())
1262 b.extend(p.bookmarks())
1265 return b
1263 return b
1266
1264
1267 def phase(self):
1265 def phase(self):
1268 phase = phases.draft # default phase to draft
1266 phase = phases.draft # default phase to draft
1269 for p in self.parents():
1267 for p in self.parents():
1270 phase = max(phase, p.phase())
1268 phase = max(phase, p.phase())
1271 return phase
1269 return phase
1272
1270
    def hidden(self):
        return False

    def children(self):
        # An uncommitted context has no children.
        return []

    def flags(self, path):
        # Prefer the cached manifest when one has already been built.
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            # Treat unreadable files as carrying no flags.
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match,
                                               subrepos=sorted(self.substate),
                                               unknown=True, ignored=False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # Yield the parents first, then every ancestor changeset.
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)
1310
1308
    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        with self._repo.dirstate.parentchange():
            for f in self.modified() + self.added():
                self._repo.dirstate.normal(f)
            for f in self.removed():
                self._repo.dirstate.drop(f)
            self._repo.dirstate.setparents(node)

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())

    def dirty(self, missing=False, merge=True, branch=True):
        # Subclasses define what "dirty" means; the base is never dirty.
        return False
1335
1333
class workingctx(committablectx):
    """A workingctx object makes access to data related to
    the current working directory convenient.
    date - any valid date string or (unixtime, offset), or None.
    user - username string, or None.
    extra - a dictionary of extra values, or None.
    changes - a list of file lists as returned by localrepo.status()
    or None to use the repository status.
    """
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1348
1346
1349 def __iter__(self):
1347 def __iter__(self):
1350 d = self._repo.dirstate
1348 d = self._repo.dirstate
1351 for f in d:
1349 for f in d:
1352 if d[f] != 'r':
1350 if d[f] != 'r':
1353 yield f
1351 yield f
1354
1352
    def __contains__(self, key):
        # A file is "in" the working context unless the dirstate says it is
        # unknown ('?') or removed ('r').
        return self._repo.dirstate[key] not in "?r"

    def hex(self):
        # The working directory is identified by the magic wdir node.
        return hex(wdirid)

    @propertycache
    def _parents(self):
        p = self._repo.dirstate.parents()
        # Drop the second parent when it is null (no merge in progress).
        if p[1] == nullid:
            p = p[:-1]
        return [changectx(self._repo, x) for x in p]

    def filectx(self, path, filelog=None):
        """get a file context from the working directory"""
        return workingfilectx(self._repo, path, workingctx=self,
                              filelog=filelog)

    def dirty(self, missing=False, merge=True, branch=True):
        "check whether a working directory is modified"
        # check subrepos first
        for s in sorted(self.substate):
            if self.sub(s).dirty(missing=missing):
                return True
        # check current working dir
        return ((merge and self.p2()) or
                (branch and self.branch() != self.p1().branch()) or
                self.modified() or self.added() or self.removed() or
                (missing and self.deleted()))
1384
1382
    def add(self, list, prefix=""):
        """Schedule the given files for tracking; return rejected paths."""
        with self._repo.wlock():
            ui, ds = self._repo.ui, self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            lstat = self._repo.wvfs.lstat
            for f in list:
                # ds.pathto() returns an absolute file when this is invoked from
                # the keyword extension. That gets flagged as non-portable on
                # Windows, since it contains the drive letter and colon.
                scmutil.checkportable(ui, os.path.join(prefix, f))
                try:
                    st = lstat(f)
                except OSError:
                    ui.warn(_("%s does not exist!\n") % uipath(f))
                    rejected.append(f)
                    continue
                if st.st_size > 10000000:
                    # Large files are only warned about, not rejected.
                    ui.warn(_("%s: up to %d MB of RAM may be required "
                              "to manage this file\n"
                              "(use 'hg revert %s' to cancel the "
                              "pending addition)\n")
                            % (f, 3 * st.st_size // 1000000, uipath(f)))
                if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
                    ui.warn(_("%s not added: only files and symlinks "
                              "supported currently\n") % uipath(f))
                    rejected.append(f)
                elif ds[f] in 'amn':
                    ui.warn(_("%s already tracked!\n") % uipath(f))
                elif ds[f] == 'r':
                    # Previously removed: resurrect instead of re-adding.
                    ds.normallookup(f)
                else:
                    ds.add(f)
            return rejected

    def forget(self, files, prefix=""):
        """Stop tracking the given files; return the paths that were not
        tracked to begin with."""
        with self._repo.wlock():
            ds = self._repo.dirstate
            uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
            rejected = []
            for f in files:
                if f not in self._repo.dirstate:
                    self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
                    rejected.append(f)
                elif self._repo.dirstate[f] != 'a':
                    # Already committed: mark removed so the forget is
                    # recorded at the next commit.
                    self._repo.dirstate.remove(f)
                else:
                    # Only added, never committed: simply drop the entry.
                    self._repo.dirstate.drop(f)
            return rejected
1434
1432
1435 def undelete(self, list):
1433 def undelete(self, list):
1436 pctxs = self.parents()
1434 pctxs = self.parents()
1437 with self._repo.wlock():
1435 with self._repo.wlock():
1438 ds = self._repo.dirstate
1436 ds = self._repo.dirstate
1439 for f in list:
1437 for f in list:
1440 if self._repo.dirstate[f] != 'r':
1438 if self._repo.dirstate[f] != 'r':
1441 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1439 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1442 else:
1440 else:
1443 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1441 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1444 t = fctx.data()
1442 t = fctx.data()
1445 self._repo.wwrite(f, t, fctx.flags())
1443 self._repo.wwrite(f, t, fctx.flags())
1446 self._repo.dirstate.normal(f)
1444 self._repo.dirstate.normal(f)
1447
1445
    def copy(self, source, dest):
        """Record in the dirstate that dest is a copy of source."""
        try:
            st = self._repo.wvfs.lstat(dest)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self._repo.ui.warn(_("%s does not exist!\n")
                               % self._repo.dirstate.pathto(dest))
            return
        if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
            self._repo.ui.warn(_("copy failed: %s is not a file or a "
                                 "symbolic link\n")
                               % self._repo.dirstate.pathto(dest))
        else:
            with self._repo.wlock():
                # Make sure dest is tracked before recording the copy.
                if self._repo.dirstate[dest] in '?':
                    self._repo.dirstate.add(dest)
                elif self._repo.dirstate[dest] in 'r':
                    self._repo.dirstate.normallookup(dest)
                self._repo.dirstate.copy(source, dest)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo

        # Only a case insensitive filesystem needs magic to translate user input
        # to actual case in the filesystem.
        icasefs = not util.fscasesensitive(r.root)
        return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
                              default, auditor=r.auditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn,
                              icasefs=icasefs)

    def _filtersuspectsymlink(self, files):
        """Drop files flagged as symlinks whose content clearly cannot be
        a symlink target (see comment below)."""
        if not files or self._repo.dirstate._checklink:
            return files

        # Symlink placeholders may get non-symlink-like contents
        # via user error or dereferencing by NFS or Samba servers,
        # so we filter out any placeholders that don't look like a
        # symlink
        sane = []
        for f in files:
            if self.flags(f) == 'l':
                d = self[f].data()
                if (d == '' or len(d) >= 1024 or '\n' in d
                    or stringutil.binary(d)):
                    self._repo.ui.debug('ignoring suspect symlink placeholder'
                                        ' "%s"\n' % f)
                    continue
            sane.append(f)
        return sane

    def _checklookup(self, files):
        # check for any possibly clean files
        if not files:
            return [], [], []

        modified = []
        deleted = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            try:
                # This will return True for a file that got replaced by a
                # directory in the interim, but fixing that is pretty hard.
                if (f not in pctx or self.flags(f) != pctx.flags(f)
                    or pctx[f].cmp(self[f])):
                    modified.append(f)
                else:
                    fixup.append(f)
            except (IOError, OSError):
                # A file become inaccessible in between? Mark it as deleted,
                # matching dirstate behavior (issue5584).
                # The dirstate has more complex behavior around whether a
                # missing file matches a directory, etc, but we don't need to
                # bother with that: if f has made it to this point, we're sure
                # it's in the dirstate.
                deleted.append(f)

        return modified, deleted, fixup
1530
1528
1531 def _poststatusfixup(self, status, fixup):
1529 def _poststatusfixup(self, status, fixup):
1532 """update dirstate for files that are actually clean"""
1530 """update dirstate for files that are actually clean"""
1533 poststatus = self._repo.postdsstatus()
1531 poststatus = self._repo.postdsstatus()
1534 if fixup or poststatus:
1532 if fixup or poststatus:
1535 try:
1533 try:
1536 oldid = self._repo.dirstate.identity()
1534 oldid = self._repo.dirstate.identity()
1537
1535
1538 # updating the dirstate is optional
1536 # updating the dirstate is optional
1539 # so we don't wait on the lock
1537 # so we don't wait on the lock
1540 # wlock can invalidate the dirstate, so cache normal _after_
1538 # wlock can invalidate the dirstate, so cache normal _after_
1541 # taking the lock
1539 # taking the lock
1542 with self._repo.wlock(False):
1540 with self._repo.wlock(False):
1543 if self._repo.dirstate.identity() == oldid:
1541 if self._repo.dirstate.identity() == oldid:
1544 if fixup:
1542 if fixup:
1545 normal = self._repo.dirstate.normal
1543 normal = self._repo.dirstate.normal
1546 for f in fixup:
1544 for f in fixup:
1547 normal(f)
1545 normal(f)
1548 # write changes out explicitly, because nesting
1546 # write changes out explicitly, because nesting
1549 # wlock at runtime may prevent 'wlock.release()'
1547 # wlock at runtime may prevent 'wlock.release()'
1550 # after this block from doing so for subsequent
1548 # after this block from doing so for subsequent
1551 # changing files
1549 # changing files
1552 tr = self._repo.currenttransaction()
1550 tr = self._repo.currenttransaction()
1553 self._repo.dirstate.write(tr)
1551 self._repo.dirstate.write(tr)
1554
1552
1555 if poststatus:
1553 if poststatus:
1556 for ps in poststatus:
1554 for ps in poststatus:
1557 ps(self, status)
1555 ps(self, status)
1558 else:
1556 else:
1559 # in this case, writing changes out breaks
1557 # in this case, writing changes out breaks
1560 # consistency, because .hg/dirstate was
1558 # consistency, because .hg/dirstate was
1561 # already changed simultaneously after last
1559 # already changed simultaneously after last
1562 # caching (see also issue5584 for detail)
1560 # caching (see also issue5584 for detail)
1563 self._repo.ui.debug('skip updating dirstate: '
1561 self._repo.ui.debug('skip updating dirstate: '
1564 'identity mismatch\n')
1562 'identity mismatch\n')
1565 except error.LockError:
1563 except error.LockError:
1566 pass
1564 pass
1567 finally:
1565 finally:
1568 # Even if the wlock couldn't be grabbed, clear out the list.
1566 # Even if the wlock couldn't be grabbed, clear out the list.
1569 self._repo.clearpostdsstatus()
1567 self._repo.clearpostdsstatus()
1570
1568
1571 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1569 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1572 '''Gets the status from the dirstate -- internal use only.'''
1570 '''Gets the status from the dirstate -- internal use only.'''
1573 subrepos = []
1571 subrepos = []
1574 if '.hgsub' in self:
1572 if '.hgsub' in self:
1575 subrepos = sorted(self.substate)
1573 subrepos = sorted(self.substate)
1576 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1574 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1577 clean=clean, unknown=unknown)
1575 clean=clean, unknown=unknown)
1578
1576
1579 # check for any possibly clean files
1577 # check for any possibly clean files
1580 fixup = []
1578 fixup = []
1581 if cmp:
1579 if cmp:
1582 modified2, deleted2, fixup = self._checklookup(cmp)
1580 modified2, deleted2, fixup = self._checklookup(cmp)
1583 s.modified.extend(modified2)
1581 s.modified.extend(modified2)
1584 s.deleted.extend(deleted2)
1582 s.deleted.extend(deleted2)
1585
1583
1586 if fixup and clean:
1584 if fixup and clean:
1587 s.clean.extend(fixup)
1585 s.clean.extend(fixup)
1588
1586
1589 self._poststatusfixup(s, fixup)
1587 self._poststatusfixup(s, fixup)
1590
1588
1591 if match.always():
1589 if match.always():
1592 # cache for performance
1590 # cache for performance
1593 if s.unknown or s.ignored or s.clean:
1591 if s.unknown or s.ignored or s.clean:
1594 # "_status" is cached with list*=False in the normal route
1592 # "_status" is cached with list*=False in the normal route
1595 self._status = scmutil.status(s.modified, s.added, s.removed,
1593 self._status = scmutil.status(s.modified, s.added, s.removed,
1596 s.deleted, [], [], [])
1594 s.deleted, [], [], [])
1597 else:
1595 else:
1598 self._status = s
1596 self._status = s
1599
1597
1600 return s
1598 return s
1601
1599
1602 @propertycache
1600 @propertycache
1603 def _manifest(self):
1601 def _manifest(self):
1604 """generate a manifest corresponding to the values in self._status
1602 """generate a manifest corresponding to the values in self._status
1605
1603
1606 This reuse the file nodeid from parent, but we use special node
1604 This reuse the file nodeid from parent, but we use special node
1607 identifiers for added and modified files. This is used by manifests
1605 identifiers for added and modified files. This is used by manifests
1608 merge to see that files are different and by update logic to avoid
1606 merge to see that files are different and by update logic to avoid
1609 deleting newly added files.
1607 deleting newly added files.
1610 """
1608 """
1611 return self._buildstatusmanifest(self._status)
1609 return self._buildstatusmanifest(self._status)
1612
1610
1613 def _buildstatusmanifest(self, status):
1611 def _buildstatusmanifest(self, status):
1614 """Builds a manifest that includes the given status results."""
1612 """Builds a manifest that includes the given status results."""
1615 parents = self.parents()
1613 parents = self.parents()
1616
1614
1617 man = parents[0].manifest().copy()
1615 man = parents[0].manifest().copy()
1618
1616
1619 ff = self._flagfunc
1617 ff = self._flagfunc
1620 for i, l in ((addednodeid, status.added),
1618 for i, l in ((addednodeid, status.added),
1621 (modifiednodeid, status.modified)):
1619 (modifiednodeid, status.modified)):
1622 for f in l:
1620 for f in l:
1623 man[f] = i
1621 man[f] = i
1624 try:
1622 try:
1625 man.setflag(f, ff(f))
1623 man.setflag(f, ff(f))
1626 except OSError:
1624 except OSError:
1627 pass
1625 pass
1628
1626
1629 for f in status.deleted + status.removed:
1627 for f in status.deleted + status.removed:
1630 if f in man:
1628 if f in man:
1631 del man[f]
1629 del man[f]
1632
1630
1633 return man
1631 return man
1634
1632
1635 def _buildstatus(self, other, s, match, listignored, listclean,
1633 def _buildstatus(self, other, s, match, listignored, listclean,
1636 listunknown):
1634 listunknown):
1637 """build a status with respect to another context
1635 """build a status with respect to another context
1638
1636
1639 This includes logic for maintaining the fast path of status when
1637 This includes logic for maintaining the fast path of status when
1640 comparing the working directory against its parent, which is to skip
1638 comparing the working directory against its parent, which is to skip
1641 building a new manifest if self (working directory) is not comparing
1639 building a new manifest if self (working directory) is not comparing
1642 against its parent (repo['.']).
1640 against its parent (repo['.']).
1643 """
1641 """
1644 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1642 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1645 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1643 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1646 # might have accidentally ended up with the entire contents of the file
1644 # might have accidentally ended up with the entire contents of the file
1647 # they are supposed to be linking to.
1645 # they are supposed to be linking to.
1648 s.modified[:] = self._filtersuspectsymlink(s.modified)
1646 s.modified[:] = self._filtersuspectsymlink(s.modified)
1649 if other != self._repo['.']:
1647 if other != self._repo['.']:
1650 s = super(workingctx, self)._buildstatus(other, s, match,
1648 s = super(workingctx, self)._buildstatus(other, s, match,
1651 listignored, listclean,
1649 listignored, listclean,
1652 listunknown)
1650 listunknown)
1653 return s
1651 return s
1654
1652
1655 def _matchstatus(self, other, match):
1653 def _matchstatus(self, other, match):
1656 """override the match method with a filter for directory patterns
1654 """override the match method with a filter for directory patterns
1657
1655
1658 We use inheritance to customize the match.bad method only in cases of
1656 We use inheritance to customize the match.bad method only in cases of
1659 workingctx since it belongs only to the working directory when
1657 workingctx since it belongs only to the working directory when
1660 comparing against the parent changeset.
1658 comparing against the parent changeset.
1661
1659
1662 If we aren't comparing against the working directory's parent, then we
1660 If we aren't comparing against the working directory's parent, then we
1663 just use the default match object sent to us.
1661 just use the default match object sent to us.
1664 """
1662 """
1665 if other != self._repo['.']:
1663 if other != self._repo['.']:
1666 def bad(f, msg):
1664 def bad(f, msg):
1667 # 'f' may be a directory pattern from 'match.files()',
1665 # 'f' may be a directory pattern from 'match.files()',
1668 # so 'f not in ctx1' is not enough
1666 # so 'f not in ctx1' is not enough
1669 if f not in other and not other.hasdir(f):
1667 if f not in other and not other.hasdir(f):
1670 self._repo.ui.warn('%s: %s\n' %
1668 self._repo.ui.warn('%s: %s\n' %
1671 (self._repo.dirstate.pathto(f), msg))
1669 (self._repo.dirstate.pathto(f), msg))
1672 match.bad = bad
1670 match.bad = bad
1673 return match
1671 return match
1674
1672
1675 def markcommitted(self, node):
1673 def markcommitted(self, node):
1676 super(workingctx, self).markcommitted(node)
1674 super(workingctx, self).markcommitted(node)
1677
1675
1678 sparse.aftercommit(self._repo, node)
1676 sparse.aftercommit(self._repo, node)
1679
1677
1680 class committablefilectx(basefilectx):
1678 class committablefilectx(basefilectx):
1681 """A committablefilectx provides common functionality for a file context
1679 """A committablefilectx provides common functionality for a file context
1682 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1680 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1683 def __init__(self, repo, path, filelog=None, ctx=None):
1681 def __init__(self, repo, path, filelog=None, ctx=None):
1684 self._repo = repo
1682 self._repo = repo
1685 self._path = path
1683 self._path = path
1686 self._changeid = None
1684 self._changeid = None
1687 self._filerev = self._filenode = None
1685 self._filerev = self._filenode = None
1688
1686
1689 if filelog is not None:
1687 if filelog is not None:
1690 self._filelog = filelog
1688 self._filelog = filelog
1691 if ctx:
1689 if ctx:
1692 self._changectx = ctx
1690 self._changectx = ctx
1693
1691
1694 def __nonzero__(self):
1692 def __nonzero__(self):
1695 return True
1693 return True
1696
1694
1697 __bool__ = __nonzero__
1695 __bool__ = __nonzero__
1698
1696
1699 def linkrev(self):
1697 def linkrev(self):
1700 # linked to self._changectx no matter if file is modified or not
1698 # linked to self._changectx no matter if file is modified or not
1701 return self.rev()
1699 return self.rev()
1702
1700
1703 def parents(self):
1701 def parents(self):
1704 '''return parent filectxs, following copies if necessary'''
1702 '''return parent filectxs, following copies if necessary'''
1705 def filenode(ctx, path):
1703 def filenode(ctx, path):
1706 return ctx._manifest.get(path, nullid)
1704 return ctx._manifest.get(path, nullid)
1707
1705
1708 path = self._path
1706 path = self._path
1709 fl = self._filelog
1707 fl = self._filelog
1710 pcl = self._changectx._parents
1708 pcl = self._changectx._parents
1711 renamed = self.renamed()
1709 renamed = self.renamed()
1712
1710
1713 if renamed:
1711 if renamed:
1714 pl = [renamed + (None,)]
1712 pl = [renamed + (None,)]
1715 else:
1713 else:
1716 pl = [(path, filenode(pcl[0], path), fl)]
1714 pl = [(path, filenode(pcl[0], path), fl)]
1717
1715
1718 for pc in pcl[1:]:
1716 for pc in pcl[1:]:
1719 pl.append((path, filenode(pc, path), fl))
1717 pl.append((path, filenode(pc, path), fl))
1720
1718
1721 return [self._parentfilectx(p, fileid=n, filelog=l)
1719 return [self._parentfilectx(p, fileid=n, filelog=l)
1722 for p, n, l in pl if n != nullid]
1720 for p, n, l in pl if n != nullid]
1723
1721
1724 def children(self):
1722 def children(self):
1725 return []
1723 return []
1726
1724
1727 class workingfilectx(committablefilectx):
1725 class workingfilectx(committablefilectx):
1728 """A workingfilectx object makes access to data related to a particular
1726 """A workingfilectx object makes access to data related to a particular
1729 file in the working directory convenient."""
1727 file in the working directory convenient."""
1730 def __init__(self, repo, path, filelog=None, workingctx=None):
1728 def __init__(self, repo, path, filelog=None, workingctx=None):
1731 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1729 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1732
1730
1733 @propertycache
1731 @propertycache
1734 def _changectx(self):
1732 def _changectx(self):
1735 return workingctx(self._repo)
1733 return workingctx(self._repo)
1736
1734
1737 def data(self):
1735 def data(self):
1738 return self._repo.wread(self._path)
1736 return self._repo.wread(self._path)
1739 def renamed(self):
1737 def renamed(self):
1740 rp = self._repo.dirstate.copied(self._path)
1738 rp = self._repo.dirstate.copied(self._path)
1741 if not rp:
1739 if not rp:
1742 return None
1740 return None
1743 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1741 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1744
1742
1745 def size(self):
1743 def size(self):
1746 return self._repo.wvfs.lstat(self._path).st_size
1744 return self._repo.wvfs.lstat(self._path).st_size
1747 def date(self):
1745 def date(self):
1748 t, tz = self._changectx.date()
1746 t, tz = self._changectx.date()
1749 try:
1747 try:
1750 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1748 return (self._repo.wvfs.lstat(self._path)[stat.ST_MTIME], tz)
1751 except OSError as err:
1749 except OSError as err:
1752 if err.errno != errno.ENOENT:
1750 if err.errno != errno.ENOENT:
1753 raise
1751 raise
1754 return (t, tz)
1752 return (t, tz)
1755
1753
1756 def exists(self):
1754 def exists(self):
1757 return self._repo.wvfs.exists(self._path)
1755 return self._repo.wvfs.exists(self._path)
1758
1756
1759 def lexists(self):
1757 def lexists(self):
1760 return self._repo.wvfs.lexists(self._path)
1758 return self._repo.wvfs.lexists(self._path)
1761
1759
1762 def audit(self):
1760 def audit(self):
1763 return self._repo.wvfs.audit(self._path)
1761 return self._repo.wvfs.audit(self._path)
1764
1762
1765 def cmp(self, fctx):
1763 def cmp(self, fctx):
1766 """compare with other file context
1764 """compare with other file context
1767
1765
1768 returns True if different than fctx.
1766 returns True if different than fctx.
1769 """
1767 """
1770 # fctx should be a filectx (not a workingfilectx)
1768 # fctx should be a filectx (not a workingfilectx)
1771 # invert comparison to reuse the same code path
1769 # invert comparison to reuse the same code path
1772 return fctx.cmp(self)
1770 return fctx.cmp(self)
1773
1771
1774 def remove(self, ignoremissing=False):
1772 def remove(self, ignoremissing=False):
1775 """wraps unlink for a repo's working directory"""
1773 """wraps unlink for a repo's working directory"""
1776 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1774 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1777
1775
1778 def write(self, data, flags, backgroundclose=False, **kwargs):
1776 def write(self, data, flags, backgroundclose=False, **kwargs):
1779 """wraps repo.wwrite"""
1777 """wraps repo.wwrite"""
1780 self._repo.wwrite(self._path, data, flags,
1778 self._repo.wwrite(self._path, data, flags,
1781 backgroundclose=backgroundclose,
1779 backgroundclose=backgroundclose,
1782 **kwargs)
1780 **kwargs)
1783
1781
1784 def markcopied(self, src):
1782 def markcopied(self, src):
1785 """marks this file a copy of `src`"""
1783 """marks this file a copy of `src`"""
1786 if self._repo.dirstate[self._path] in "nma":
1784 if self._repo.dirstate[self._path] in "nma":
1787 self._repo.dirstate.copy(src, self._path)
1785 self._repo.dirstate.copy(src, self._path)
1788
1786
1789 def clearunknown(self):
1787 def clearunknown(self):
1790 """Removes conflicting items in the working directory so that
1788 """Removes conflicting items in the working directory so that
1791 ``write()`` can be called successfully.
1789 ``write()`` can be called successfully.
1792 """
1790 """
1793 wvfs = self._repo.wvfs
1791 wvfs = self._repo.wvfs
1794 f = self._path
1792 f = self._path
1795 wvfs.audit(f)
1793 wvfs.audit(f)
1796 if wvfs.isdir(f) and not wvfs.islink(f):
1794 if wvfs.isdir(f) and not wvfs.islink(f):
1797 wvfs.rmtree(f, forcibly=True)
1795 wvfs.rmtree(f, forcibly=True)
1798 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1796 if self._repo.ui.configbool('experimental', 'merge.checkpathconflicts'):
1799 for p in reversed(list(util.finddirs(f))):
1797 for p in reversed(list(util.finddirs(f))):
1800 if wvfs.isfileorlink(p):
1798 if wvfs.isfileorlink(p):
1801 wvfs.unlink(p)
1799 wvfs.unlink(p)
1802 break
1800 break
1803
1801
1804 def setflags(self, l, x):
1802 def setflags(self, l, x):
1805 self._repo.wvfs.setflags(self._path, l, x)
1803 self._repo.wvfs.setflags(self._path, l, x)
1806
1804
1807 class overlayworkingctx(committablectx):
1805 class overlayworkingctx(committablectx):
1808 """Wraps another mutable context with a write-back cache that can be
1806 """Wraps another mutable context with a write-back cache that can be
1809 converted into a commit context.
1807 converted into a commit context.
1810
1808
1811 self._cache[path] maps to a dict with keys: {
1809 self._cache[path] maps to a dict with keys: {
1812 'exists': bool?
1810 'exists': bool?
1813 'date': date?
1811 'date': date?
1814 'data': str?
1812 'data': str?
1815 'flags': str?
1813 'flags': str?
1816 'copied': str? (path or None)
1814 'copied': str? (path or None)
1817 }
1815 }
1818 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1816 If `exists` is True, `flags` must be non-None and 'date' is non-None. If it
1819 is `False`, the file was deleted.
1817 is `False`, the file was deleted.
1820 """
1818 """
1821
1819
1822 def __init__(self, repo):
1820 def __init__(self, repo):
1823 super(overlayworkingctx, self).__init__(repo)
1821 super(overlayworkingctx, self).__init__(repo)
1824 self.clean()
1822 self.clean()
1825
1823
1826 def setbase(self, wrappedctx):
1824 def setbase(self, wrappedctx):
1827 self._wrappedctx = wrappedctx
1825 self._wrappedctx = wrappedctx
1828 self._parents = [wrappedctx]
1826 self._parents = [wrappedctx]
1829 # Drop old manifest cache as it is now out of date.
1827 # Drop old manifest cache as it is now out of date.
1830 # This is necessary when, e.g., rebasing several nodes with one
1828 # This is necessary when, e.g., rebasing several nodes with one
1831 # ``overlayworkingctx`` (e.g. with --collapse).
1829 # ``overlayworkingctx`` (e.g. with --collapse).
1832 util.clearcachedproperty(self, '_manifest')
1830 util.clearcachedproperty(self, '_manifest')
1833
1831
1834 def data(self, path):
1832 def data(self, path):
1835 if self.isdirty(path):
1833 if self.isdirty(path):
1836 if self._cache[path]['exists']:
1834 if self._cache[path]['exists']:
1837 if self._cache[path]['data']:
1835 if self._cache[path]['data']:
1838 return self._cache[path]['data']
1836 return self._cache[path]['data']
1839 else:
1837 else:
1840 # Must fallback here, too, because we only set flags.
1838 # Must fallback here, too, because we only set flags.
1841 return self._wrappedctx[path].data()
1839 return self._wrappedctx[path].data()
1842 else:
1840 else:
1843 raise error.ProgrammingError("No such file or directory: %s" %
1841 raise error.ProgrammingError("No such file or directory: %s" %
1844 path)
1842 path)
1845 else:
1843 else:
1846 return self._wrappedctx[path].data()
1844 return self._wrappedctx[path].data()
1847
1845
1848 @propertycache
1846 @propertycache
1849 def _manifest(self):
1847 def _manifest(self):
1850 parents = self.parents()
1848 parents = self.parents()
1851 man = parents[0].manifest().copy()
1849 man = parents[0].manifest().copy()
1852
1850
1853 flag = self._flagfunc
1851 flag = self._flagfunc
1854 for path in self.added():
1852 for path in self.added():
1855 man[path] = addednodeid
1853 man[path] = addednodeid
1856 man.setflag(path, flag(path))
1854 man.setflag(path, flag(path))
1857 for path in self.modified():
1855 for path in self.modified():
1858 man[path] = modifiednodeid
1856 man[path] = modifiednodeid
1859 man.setflag(path, flag(path))
1857 man.setflag(path, flag(path))
1860 for path in self.removed():
1858 for path in self.removed():
1861 del man[path]
1859 del man[path]
1862 return man
1860 return man
1863
1861
1864 @propertycache
1862 @propertycache
1865 def _flagfunc(self):
1863 def _flagfunc(self):
1866 def f(path):
1864 def f(path):
1867 return self._cache[path]['flags']
1865 return self._cache[path]['flags']
1868 return f
1866 return f
1869
1867
1870 def files(self):
1868 def files(self):
1871 return sorted(self.added() + self.modified() + self.removed())
1869 return sorted(self.added() + self.modified() + self.removed())
1872
1870
1873 def modified(self):
1871 def modified(self):
1874 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1872 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1875 self._existsinparent(f)]
1873 self._existsinparent(f)]
1876
1874
1877 def added(self):
1875 def added(self):
1878 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1876 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
1879 not self._existsinparent(f)]
1877 not self._existsinparent(f)]
1880
1878
1881 def removed(self):
1879 def removed(self):
1882 return [f for f in self._cache.keys() if
1880 return [f for f in self._cache.keys() if
1883 not self._cache[f]['exists'] and self._existsinparent(f)]
1881 not self._cache[f]['exists'] and self._existsinparent(f)]
1884
1882
1885 def isinmemory(self):
1883 def isinmemory(self):
1886 return True
1884 return True
1887
1885
1888 def filedate(self, path):
1886 def filedate(self, path):
1889 if self.isdirty(path):
1887 if self.isdirty(path):
1890 return self._cache[path]['date']
1888 return self._cache[path]['date']
1891 else:
1889 else:
1892 return self._wrappedctx[path].date()
1890 return self._wrappedctx[path].date()
1893
1891
1894 def markcopied(self, path, origin):
1892 def markcopied(self, path, origin):
1895 if self.isdirty(path):
1893 if self.isdirty(path):
1896 self._cache[path]['copied'] = origin
1894 self._cache[path]['copied'] = origin
1897 else:
1895 else:
1898 raise error.ProgrammingError('markcopied() called on clean context')
1896 raise error.ProgrammingError('markcopied() called on clean context')
1899
1897
1900 def copydata(self, path):
1898 def copydata(self, path):
1901 if self.isdirty(path):
1899 if self.isdirty(path):
1902 return self._cache[path]['copied']
1900 return self._cache[path]['copied']
1903 else:
1901 else:
1904 raise error.ProgrammingError('copydata() called on clean context')
1902 raise error.ProgrammingError('copydata() called on clean context')
1905
1903
1906 def flags(self, path):
1904 def flags(self, path):
1907 if self.isdirty(path):
1905 if self.isdirty(path):
1908 if self._cache[path]['exists']:
1906 if self._cache[path]['exists']:
1909 return self._cache[path]['flags']
1907 return self._cache[path]['flags']
1910 else:
1908 else:
1911 raise error.ProgrammingError("No such file or directory: %s" %
1909 raise error.ProgrammingError("No such file or directory: %s" %
1912 self._path)
1910 self._path)
1913 else:
1911 else:
1914 return self._wrappedctx[path].flags()
1912 return self._wrappedctx[path].flags()
1915
1913
1916 def _existsinparent(self, path):
1914 def _existsinparent(self, path):
1917 try:
1915 try:
1918 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
1916 # ``commitctx` raises a ``ManifestLookupError`` if a path does not
1919 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1917 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
1920 # with an ``exists()`` function.
1918 # with an ``exists()`` function.
1921 self._wrappedctx[path]
1919 self._wrappedctx[path]
1922 return True
1920 return True
1923 except error.ManifestLookupError:
1921 except error.ManifestLookupError:
1924 return False
1922 return False
1925
1923
1926 def _auditconflicts(self, path):
1924 def _auditconflicts(self, path):
1927 """Replicates conflict checks done by wvfs.write().
1925 """Replicates conflict checks done by wvfs.write().
1928
1926
1929 Since we never write to the filesystem and never call `applyupdates` in
1927 Since we never write to the filesystem and never call `applyupdates` in
1930 IMM, we'll never check that a path is actually writable -- e.g., because
1928 IMM, we'll never check that a path is actually writable -- e.g., because
1931 it adds `a/foo`, but `a` is actually a file in the other commit.
1929 it adds `a/foo`, but `a` is actually a file in the other commit.
1932 """
1930 """
1933 def fail(path, component):
1931 def fail(path, component):
1934 # p1() is the base and we're receiving "writes" for p2()'s
1932 # p1() is the base and we're receiving "writes" for p2()'s
1935 # files.
1933 # files.
1936 if 'l' in self.p1()[component].flags():
1934 if 'l' in self.p1()[component].flags():
1937 raise error.Abort("error: %s conflicts with symlink %s "
1935 raise error.Abort("error: %s conflicts with symlink %s "
1938 "in %s." % (path, component,
1936 "in %s." % (path, component,
1939 self.p1().rev()))
1937 self.p1().rev()))
1940 else:
1938 else:
1941 raise error.Abort("error: '%s' conflicts with file '%s' in "
1939 raise error.Abort("error: '%s' conflicts with file '%s' in "
1942 "%s." % (path, component,
1940 "%s." % (path, component,
1943 self.p1().rev()))
1941 self.p1().rev()))
1944
1942
1945 # Test that each new directory to be created to write this path from p2
1943 # Test that each new directory to be created to write this path from p2
1946 # is not a file in p1.
1944 # is not a file in p1.
1947 components = path.split('/')
1945 components = path.split('/')
1948 for i in xrange(len(components)):
1946 for i in xrange(len(components)):
1949 component = "/".join(components[0:i])
1947 component = "/".join(components[0:i])
1950 if component in self.p1():
1948 if component in self.p1():
1951 fail(path, component)
1949 fail(path, component)
1952
1950
1953 # Test the other direction -- that this path from p2 isn't a directory
1951 # Test the other direction -- that this path from p2 isn't a directory
1954 # in p1 (test that p1 doesn't any paths matching `path/*`).
1952 # in p1 (test that p1 doesn't any paths matching `path/*`).
1955 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1953 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
1956 matches = self.p1().manifest().matches(match)
1954 matches = self.p1().manifest().matches(match)
1957 if len(matches) > 0:
1955 if len(matches) > 0:
1958 if len(matches) == 1 and matches.keys()[0] == path:
1956 if len(matches) == 1 and matches.keys()[0] == path:
1959 return
1957 return
1960 raise error.Abort("error: file '%s' cannot be written because "
1958 raise error.Abort("error: file '%s' cannot be written because "
1961 " '%s/' is a folder in %s (containing %d "
1959 " '%s/' is a folder in %s (containing %d "
1962 "entries: %s)"
1960 "entries: %s)"
1963 % (path, path, self.p1(), len(matches),
1961 % (path, path, self.p1(), len(matches),
1964 ', '.join(matches.keys())))
1962 ', '.join(matches.keys())))
1965
1963
1966 def write(self, path, data, flags='', **kwargs):
1964 def write(self, path, data, flags='', **kwargs):
1967 if data is None:
1965 if data is None:
1968 raise error.ProgrammingError("data must be non-None")
1966 raise error.ProgrammingError("data must be non-None")
1969 self._auditconflicts(path)
1967 self._auditconflicts(path)
1970 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1968 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
1971 flags=flags)
1969 flags=flags)
1972
1970
1973 def setflags(self, path, l, x):
1971 def setflags(self, path, l, x):
1974 self._markdirty(path, exists=True, date=dateutil.makedate(),
1972 self._markdirty(path, exists=True, date=dateutil.makedate(),
1975 flags=(l and 'l' or '') + (x and 'x' or ''))
1973 flags=(l and 'l' or '') + (x and 'x' or ''))
1976
1974
1977 def remove(self, path):
1975 def remove(self, path):
1978 self._markdirty(path, exists=False)
1976 self._markdirty(path, exists=False)
1979
1977
1980 def exists(self, path):
1978 def exists(self, path):
1981 """exists behaves like `lexists`, but needs to follow symlinks and
1979 """exists behaves like `lexists`, but needs to follow symlinks and
1982 return False if they are broken.
1980 return False if they are broken.
1983 """
1981 """
1984 if self.isdirty(path):
1982 if self.isdirty(path):
1985 # If this path exists and is a symlink, "follow" it by calling
1983 # If this path exists and is a symlink, "follow" it by calling
1986 # exists on the destination path.
1984 # exists on the destination path.
1987 if (self._cache[path]['exists'] and
1985 if (self._cache[path]['exists'] and
1988 'l' in self._cache[path]['flags']):
1986 'l' in self._cache[path]['flags']):
1989 return self.exists(self._cache[path]['data'].strip())
1987 return self.exists(self._cache[path]['data'].strip())
1990 else:
1988 else:
1991 return self._cache[path]['exists']
1989 return self._cache[path]['exists']
1992
1990
1993 return self._existsinparent(path)
1991 return self._existsinparent(path)
1994
1992
1995 def lexists(self, path):
1993 def lexists(self, path):
1996 """lexists returns True if the path exists"""
1994 """lexists returns True if the path exists"""
1997 if self.isdirty(path):
1995 if self.isdirty(path):
1998 return self._cache[path]['exists']
1996 return self._cache[path]['exists']
1999
1997
2000 return self._existsinparent(path)
1998 return self._existsinparent(path)
2001
1999
2002 def size(self, path):
2000 def size(self, path):
2003 if self.isdirty(path):
2001 if self.isdirty(path):
2004 if self._cache[path]['exists']:
2002 if self._cache[path]['exists']:
2005 return len(self._cache[path]['data'])
2003 return len(self._cache[path]['data'])
2006 else:
2004 else:
2007 raise error.ProgrammingError("No such file or directory: %s" %
2005 raise error.ProgrammingError("No such file or directory: %s" %
2008 self._path)
2006 self._path)
2009 return self._wrappedctx[path].size()
2007 return self._wrappedctx[path].size()
2010
2008
    def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
                 user=None, editor=None):
        """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
        committed.

        ``text`` is the commit message.
        ``parents`` (optional) are rev numbers.
        """
        # Default parents to the wrapped contexts' if not passed.
        if parents is None:
            parents = self._wrappedctx.parents()
            if len(parents) == 1:
                parents = (parents[0], None)

        # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
        if parents[1] is None:
            parents = (self._repo[parents[0]], None)
        else:
            parents = (self._repo[parents[0]], self._repo[parents[1]])

        # Every cached path (modified, added, or deleted) must be listed in
        # ``files`` so memctx consults getfile() for it.
        files = self._cache.keys()
        def getfile(repo, memctx, path):
            # NOTE: closes over self._cache; the memctx must be committed
            # before this overlay context is clean()ed or mutated further.
            if self._cache[path]['exists']:
                return memfilectx(repo, memctx, path,
                                  self._cache[path]['data'],
                                  'l' in self._cache[path]['flags'],
                                  'x' in self._cache[path]['flags'],
                                  self._cache[path]['copied'])
            else:
                # Returning None, but including the path in `files`, is
                # necessary for memctx to register a deletion.
                return None
        return memctx(self._repo, parents, text, files, getfile, date=date,
                      extra=extra, user=user, branch=branch, editor=editor)
2045
2043
2046 def isdirty(self, path):
2044 def isdirty(self, path):
2047 return path in self._cache
2045 return path in self._cache
2048
2046
2049 def isempty(self):
2047 def isempty(self):
2050 # We need to discard any keys that are actually clean before the empty
2048 # We need to discard any keys that are actually clean before the empty
2051 # commit check.
2049 # commit check.
2052 self._compact()
2050 self._compact()
2053 return len(self._cache) == 0
2051 return len(self._cache) == 0
2054
2052
2055 def clean(self):
2053 def clean(self):
2056 self._cache = {}
2054 self._cache = {}
2057
2055
2058 def _compact(self):
2056 def _compact(self):
2059 """Removes keys from the cache that are actually clean, by comparing
2057 """Removes keys from the cache that are actually clean, by comparing
2060 them with the underlying context.
2058 them with the underlying context.
2061
2059
2062 This can occur during the merge process, e.g. by passing --tool :local
2060 This can occur during the merge process, e.g. by passing --tool :local
2063 to resolve a conflict.
2061 to resolve a conflict.
2064 """
2062 """
2065 keys = []
2063 keys = []
2066 for path in self._cache.keys():
2064 for path in self._cache.keys():
2067 cache = self._cache[path]
2065 cache = self._cache[path]
2068 try:
2066 try:
2069 underlying = self._wrappedctx[path]
2067 underlying = self._wrappedctx[path]
2070 if (underlying.data() == cache['data'] and
2068 if (underlying.data() == cache['data'] and
2071 underlying.flags() == cache['flags']):
2069 underlying.flags() == cache['flags']):
2072 keys.append(path)
2070 keys.append(path)
2073 except error.ManifestLookupError:
2071 except error.ManifestLookupError:
2074 # Path not in the underlying manifest (created).
2072 # Path not in the underlying manifest (created).
2075 continue
2073 continue
2076
2074
2077 for path in keys:
2075 for path in keys:
2078 del self._cache[path]
2076 del self._cache[path]
2079 return keys
2077 return keys
2080
2078
2081 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2079 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2082 self._cache[path] = {
2080 self._cache[path] = {
2083 'exists': exists,
2081 'exists': exists,
2084 'data': data,
2082 'data': data,
2085 'date': date,
2083 'date': date,
2086 'flags': flags,
2084 'flags': flags,
2087 'copied': None,
2085 'copied': None,
2088 }
2086 }
2089
2087
2090 def filectx(self, path, filelog=None):
2088 def filectx(self, path, filelog=None):
2091 return overlayworkingfilectx(self._repo, path, parent=self,
2089 return overlayworkingfilectx(self._repo, path, parent=self,
2092 filelog=filelog)
2090 filelog=filelog)
2093
2091
class overlayworkingfilectx(committablefilectx):
    """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
    cache, which can be flushed through later by calling ``flush()``.

    Every query and mutation is delegated to the parent
    ``overlayworkingctx`` (``self._parent``), keyed by ``self._path``;
    this class holds no file state of its own.
    """

    def __init__(self, repo, path, filelog=None, parent=None):
        super(overlayworkingfilectx, self).__init__(repo, path, filelog,
                                                    parent)
        self._repo = repo
        self._parent = parent
        self._path = path

    def cmp(self, fctx):
        # compare by content, not by filenode (in-memory files have none)
        return self.data() != fctx.data()

    def changectx(self):
        return self._parent

    def data(self):
        return self._parent.data(self._path)

    def date(self):
        return self._parent.filedate(self._path)

    def exists(self):
        return self.lexists()

    def lexists(self):
        return self._parent.exists(self._path)

    def renamed(self):
        """Return (source path, source filenode) if this file was copied,
        else None."""
        path = self._parent.copydata(self._path)
        if not path:
            return None
        return path, self._changectx._parents[0]._manifest.get(path, nullid)

    def size(self):
        return self._parent.size(self._path)

    def markcopied(self, origin):
        self._parent.markcopied(self._path, origin)

    def audit(self):
        # no on-disk path to audit for an in-memory file
        pass

    def flags(self):
        return self._parent.flags(self._path)

    def setflags(self, islink, isexec):
        return self._parent.setflags(self._path, islink, isexec)

    def write(self, data, flags, backgroundclose=False, **kwargs):
        # backgroundclose is accepted for interface compatibility but has
        # no meaning for an in-memory write
        return self._parent.write(self._path, data, flags, **kwargs)

    def remove(self, ignoremissing=False):
        return self._parent.remove(self._path)

    def clearunknown(self):
        # nothing on disk to clear for an in-memory file
        pass
2152
2150
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # deliberately skip workingctx.__init__ and call its parent:
        # ``changes`` pins the status instead of computing it lazily
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        if clean:
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed
2188
2186
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        try:
            return cache[path]
        except KeyError:
            result = func(repo, memctx, path)
            cache[path] = result
            return result

    return getfilectx
2204
2202
def memfilefromctx(ctx):
    """Given a context return a memfilectx for ctx[path]

    This is a convenience method for building a memctx based on another
    context.
    """
    def getfilectx(repo, memctx, path):
        fctx = ctx[path]
        # renamed() yields (source path, source node), but memfilectx
        # only tracks the source path (it only keeps one parent)
        renamed = fctx.renamed()
        copied = renamed[0] if renamed else None
        return memfilectx(repo, memctx, path, fctx.data(),
                          islink=fctx.islink(), isexec=fctx.isexec(),
                          copied=copied)

    return getfilectx
2223
2221
def memfilefrompatch(patchstore):
    """Given a patch (e.g. patchstore object) return a memfilectx

    This is a convenience method for building a memctx based on a patchstore.
    """
    def getfilectx(repo, memctx, path):
        data, mode, copied = patchstore.getfile(path)
        if data is None:
            # the patch deletes this file
            return None
        islink, isexec = mode
        return memfilectx(repo, memctx, path, data,
                          islink=islink, isexec=isexec, copied=copied)

    return getfilectx
2238
2236
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism.  'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn return None for recent
    Mercurial. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by dateutil.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, branch=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # map missing parents to the null revision
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [self._repo[p] for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        if branch is not None:
            self._extra['branch'] = encoding.fromlocal(branch)
        self.substate = {}

        if isinstance(filectxfn, patch.filestore):
            filectxfn = memfilefrompatch(filectxfn)
        elif not callable(filectxfn):
            # if store is not callable, wrap it in a function
            filectxfn = memfilefromctx(filectxfn)

        # memoizing increases performance for e.g. vcs convert scenarios.
        self._filectxfn = makecachingfilectxfn(filectxfn)

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
                if len(p) > 1:
                    p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # added files have no filelog parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif self[f]:
                # filectxfn returned a memfilectx: modified
                modified.append(f)
            else:
                # filectxfn returned None: removed
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2361
2359
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, changectx, path, data, islink=False,
                 isexec=False, copied=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, changectx)
        self._data = data
        # manifest flags string: 'l' for symlink, 'x' for executable
        self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
        self._copied = None
        if copied:
            # the copy source node is recomputed at commit time, so nullid
            # is an acceptable placeholder here
            self._copied = (copied, nullid)

    def data(self):
        return self._data

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags, **kwargs):
        """wraps repo.wwrite"""
        # flags/kwargs are accepted for interface compatibility; only the
        # in-memory data is replaced
        self._data = data
class overlayfilectx(committablefilectx):
    """Like memfilectx but take an original filectx and optional parameters to
    override parts of it. This is useful when fctx.data() is expensive (i.e.
    flag processor is expensive) and raw data, flags, and filenode could be
    reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
    """

    def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
                 copied=None, ctx=None):
        """originalfctx: filecontext to duplicate

        datafunc: None or a function to override data (file content). It is a
        function to be lazy. path, flags, copied, ctx: None or overridden value

        copied could be (path, rev), or False. copied could also be just path,
        and will be converted to (path, nullid). This simplifies some callers.
        """

        if path is None:
            path = originalfctx.path()
        if ctx is None:
            ctx = originalfctx.changectx()
            ctxmatch = lambda: True
        else:
            # lazy: only evaluated when deciding reusability below
            ctxmatch = lambda: ctx == originalfctx.changectx()

        repo = originalfctx.repo()
        flog = originalfctx.filelog()
        super(overlayfilectx, self).__init__(repo, path, flog, ctx)

        if copied is None:
            copied = originalfctx.renamed()
            copiedmatch = lambda: True
        else:
            if copied and not isinstance(copied, tuple):
                # repo._filecommit will recalculate copyrev so nullid is okay
                copied = (copied, nullid)
            copiedmatch = lambda: copied == originalfctx.renamed()

        # When data, copied (could affect data), ctx (could affect filelog
        # parents) are not overridden, rawdata, rawflags, and filenode may be
        # reused (repo._filecommit should double check filelog parents).
        #
        # path, flags are not hashed in filelog (but in manifestlog) so they do
        # not affect reusable here.
        #
        # If ctx or copied is overridden to a same value with originalfctx,
        # still consider it's reusable. originalfctx.renamed() may be a bit
        # expensive so it's not called unless necessary. Assuming datafunc is
        # always expensive, do not call it for this "reusable" test.
        reusable = datafunc is None and ctxmatch() and copiedmatch()

        if datafunc is None:
            datafunc = originalfctx.data
        if flags is None:
            flags = originalfctx.flags()

        self._datafunc = datafunc
        self._flags = flags
        self._copied = copied

        if reusable:
            # copy extra fields from originalfctx
            attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
            for attr_ in attrs:
                if util.safehasattr(originalfctx, attr_):
                    setattr(self, attr_, getattr(originalfctx, attr_))

    def data(self):
        # lazy: the (possibly expensive) data function is only invoked here
        return self._datafunc()
2465
2463
class metadataonlyctx(committablectx):
    """Like memctx but it's reusing the manifest of a different commit.
    Intended to be used by lightweight operations that are creating
    metadata-only changes.

    Revision information is supplied at initialization time. 'repo' is the
    current localrepo, 'originalctx' is the original revision whose manifest
    we're reusing, 'parents' is a sequence of two parent revision identifiers
    (pass None for every missing parent), 'text' is the commit message.

    user receives the committer name and defaults to current repository
    username, date is the commit date in any format supported by
    dateutil.parsedate() and defaults to current date, extra is a dictionary of
    metadata or is left empty.
    """
    def __init__(self, repo, originalctx, parents=None, text=None, user=None,
                 date=None, extra=None, editor=False):
        if text is None:
            text = originalctx.description()
        super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        self._originalctx = originalctx
        self._manifestnode = originalctx.manifestnode()
        if parents is None:
            parents = originalctx.parents()
        else:
            parents = [repo[p] for p in parents if p is not None]
        parents = parents[:]
        while len(parents) < 2:
            parents.append(repo[nullid])
        p1, p2 = self._parents = parents

        # sanity check to ensure that the reused manifest parents are
        # manifests of our commit parents.  Compare nodes, not contexts:
        # a changectx never equals the nullid bytestring, so "p1 != nullid"
        # would always be true and the null-parent case would never be
        # skipped as intended.
        mp1, mp2 = self.manifestctx().parents
        if p1.node() != nullid and p1.manifestnode() != mp1:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p1 doesn\'t match the new ctx p1')
        if p2.node() != nullid and p2.manifestnode() != mp2:
            raise RuntimeError('can\'t reuse the manifest: '
                               'its p2 doesn\'t match the new ctx p2')

        self._files = originalctx.files()
        self.substate = {}

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def manifestnode(self):
        """Return the manifest node reused from the original context."""
        return self._manifestnode

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._manifestnode]

    def filectx(self, path, filelog=None):
        """Delegate file access to the original context (same manifest)."""
        return self._originalctx.filectx(path, filelog=filelog)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @property
    def _manifest(self):
        return self._originalctx.manifest()

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified in the ``origctx``
        and parents manifests.
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "metadataonlyctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                added.append(f)
            elif f in self:
                modified.append(f)
            else:
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
2560
2558
class arbitraryfilectx(object):
    """Allows you to use filectx-like functions on a file in an arbitrary
    location on disk, possibly not in the working directory.
    """
    def __init__(self, path, repo=None):
        # Repo is optional because contrib/simplemerge uses this class.
        self._repo = repo
        self._path = path

    def cmp(self, fctx):
        """Return True if this file's contents differ from fctx's.

        Note the convention is the opposite of filecmp.cmp(), which
        returns True when the files are the *same*.
        """
        # filecmp follows symlinks whereas `cmp` should not, so skip the fast
        # path if either side is a symlink.
        symlinks = ('l' in self.flags() or 'l' in fctx.flags())
        if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
            # Add a fast-path for merge if both sides are disk-backed.
            # Note that filecmp uses the opposite return values (True if same)
            # from our cmp functions (True if different).
            return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
        return self.data() != fctx.data()

    def path(self):
        return self._path

    def flags(self):
        # arbitrary on-disk files carry no exec/symlink flags
        return ''

    def data(self):
        return util.readfile(self._path)

    def decodeddata(self):
        with open(self._path, "rb") as f:
            return f.read()

    def remove(self):
        util.unlink(self._path)

    def write(self, data, flags, **kwargs):
        assert not flags
        # data is bytes; open in binary mode so this works on Python 3 and
        # round-trips with the binary reads in data()/decodeddata()
        with open(self._path, "wb") as f:
            f.write(data)
General Comments 0
You need to be logged in to leave comments. Login now