context: stop setting None for modified or added nodes...
Augie Fackler
r23593:b1179dab stable
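The hunk below adds a phony 21-byte node value, _newnode = '!' * 21, for manifest entries that are dirty in the working copy, and replaces the truthiness test (mf2node or ...) in _buildstatus with an explicit comparison (mf2node != _newnode or ...). A minimal sketch of that sentinel idea, assuming made-up helper names (is_modified, content_differs) rather than Mercurial's actual API:

    # Illustrative sketch: a fixed-width sentinel instead of None marks entries
    # that are dirty in the working copy, so the placeholder test is an explicit
    # comparison rather than a truthiness check.
    _newnode = '!' * 21  # phony node; real manifest nodes are 20-byte hashes

    def is_modified(mf1node, mf2node, content_differs):
        # Matching recorded nodes mean the file is clean.
        if mf1node == mf2node:
            return False
        # A real, differing node is enough to report the file as modified; only
        # the sentinel (content not yet hashed) needs a content comparison.
        return mf2node != _newnode or content_differs()

    print(is_modified('a' * 20, _newnode, lambda: False))  # -> False (content unchanged)
    print(is_modified('a' * 20, 'b' * 20, lambda: False))  # -> True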
@@ -1,1733 +1,1738
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex, bin
8 from node import nullid, nullrev, short, hex, bin
9 from i18n import _
9 from i18n import _
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
10 import mdiff, error, util, scmutil, subrepo, patch, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, stat
12 import os, errno, stat
13 import obsolete as obsmod
13 import obsolete as obsmod
14 import repoview
14 import repoview
15 import fileset
15 import fileset
16 import revlog
16 import revlog
17
17
18 propertycache = util.propertycache
18 propertycache = util.propertycache
19
19
20 # Phony node value to stand-in for new files in some uses of
21 # manifests. Manifests support 21-byte hashes for nodes which are
22 # dirty in the working copy.
23 _newnode = '!' * 21
24
20 class basectx(object):
25 class basectx(object):
21 """A basectx object represents the common logic for its children:
26 """A basectx object represents the common logic for its children:
22 changectx: read-only context that is already present in the repo,
27 changectx: read-only context that is already present in the repo,
23 workingctx: a context that represents the working directory and can
28 workingctx: a context that represents the working directory and can
24 be committed,
29 be committed,
25 memctx: a context that represents changes in-memory and can also
30 memctx: a context that represents changes in-memory and can also
26 be committed."""
31 be committed."""
27 def __new__(cls, repo, changeid='', *args, **kwargs):
32 def __new__(cls, repo, changeid='', *args, **kwargs):
28 if isinstance(changeid, basectx):
33 if isinstance(changeid, basectx):
29 return changeid
34 return changeid
30
35
31 o = super(basectx, cls).__new__(cls)
36 o = super(basectx, cls).__new__(cls)
32
37
33 o._repo = repo
38 o._repo = repo
34 o._rev = nullrev
39 o._rev = nullrev
35 o._node = nullid
40 o._node = nullid
36
41
37 return o
42 return o
38
43
39 def __str__(self):
44 def __str__(self):
40 return short(self.node())
45 return short(self.node())
41
46
42 def __int__(self):
47 def __int__(self):
43 return self.rev()
48 return self.rev()
44
49
45 def __repr__(self):
50 def __repr__(self):
46 return "<%s %s>" % (type(self).__name__, str(self))
51 return "<%s %s>" % (type(self).__name__, str(self))
47
52
48 def __eq__(self, other):
53 def __eq__(self, other):
49 try:
54 try:
50 return type(self) == type(other) and self._rev == other._rev
55 return type(self) == type(other) and self._rev == other._rev
51 except AttributeError:
56 except AttributeError:
52 return False
57 return False
53
58
54 def __ne__(self, other):
59 def __ne__(self, other):
55 return not (self == other)
60 return not (self == other)
56
61
57 def __contains__(self, key):
62 def __contains__(self, key):
58 return key in self._manifest
63 return key in self._manifest
59
64
60 def __getitem__(self, key):
65 def __getitem__(self, key):
61 return self.filectx(key)
66 return self.filectx(key)
62
67
63 def __iter__(self):
68 def __iter__(self):
64 for f in sorted(self._manifest):
69 for f in sorted(self._manifest):
65 yield f
70 yield f
66
71
67 def _manifestmatches(self, match, s):
72 def _manifestmatches(self, match, s):
68 """generate a new manifest filtered by the match argument
73 """generate a new manifest filtered by the match argument
69
74
70 This method is for internal use only and mainly exists to provide an
75 This method is for internal use only and mainly exists to provide an
71 object oriented way for other contexts to customize the manifest
76 object oriented way for other contexts to customize the manifest
72 generation.
77 generation.
73 """
78 """
74 if match.always():
79 if match.always():
75 return self.manifest().copy()
80 return self.manifest().copy()
76
81
77 files = match.files()
82 files = match.files()
78 if (match.matchfn == match.exact or
83 if (match.matchfn == match.exact or
79 (not match.anypats() and util.all(fn in self for fn in files))):
84 (not match.anypats() and util.all(fn in self for fn in files))):
80 return self.manifest().intersectfiles(files)
85 return self.manifest().intersectfiles(files)
81
86
82 mf = self.manifest().copy()
87 mf = self.manifest().copy()
83 for fn in mf.keys():
88 for fn in mf.keys():
84 if not match(fn):
89 if not match(fn):
85 del mf[fn]
90 del mf[fn]
86 return mf
91 return mf
87
92
88 def _matchstatus(self, other, s, match, listignored, listclean,
93 def _matchstatus(self, other, s, match, listignored, listclean,
89 listunknown):
94 listunknown):
90 """return match.always if match is none
95 """return match.always if match is none
91
96
92 This internal method provides a way for child objects to override the
97 This internal method provides a way for child objects to override the
93 match operator.
98 match operator.
94 """
99 """
95 return match or matchmod.always(self._repo.root, self._repo.getcwd())
100 return match or matchmod.always(self._repo.root, self._repo.getcwd())
96
101
97 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
102 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
98 """provide a hook to allow child objects to preprocess status results
103 """provide a hook to allow child objects to preprocess status results
99
104
100 For example, this allows other contexts, such as workingctx, to query
105 For example, this allows other contexts, such as workingctx, to query
101 the dirstate before comparing the manifests.
106 the dirstate before comparing the manifests.
102 """
107 """
103 # load earliest manifest first for caching reasons
108 # load earliest manifest first for caching reasons
104 if self.rev() < other.rev():
109 if self.rev() < other.rev():
105 self.manifest()
110 self.manifest()
106 return s
111 return s
107
112
108 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
113 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
109 """provide a hook to allow child objects to postprocess status results
114 """provide a hook to allow child objects to postprocess status results
110
115
111 For example, this allows other contexts, such as workingctx, to filter
116 For example, this allows other contexts, such as workingctx, to filter
112 suspect symlinks in the case of FAT32 and NTFS filesystems.
117 suspect symlinks in the case of FAT32 and NTFS filesystems.
113 """
118 """
114 return s
119 return s
115
120
116 def _buildstatus(self, other, s, match, listignored, listclean,
121 def _buildstatus(self, other, s, match, listignored, listclean,
117 listunknown):
122 listunknown):
118 """build a status with respect to another context"""
123 """build a status with respect to another context"""
119 mf1 = other._manifestmatches(match, s)
124 mf1 = other._manifestmatches(match, s)
120 mf2 = self._manifestmatches(match, s)
125 mf2 = self._manifestmatches(match, s)
121
126
122 modified, added, clean = [], [], []
127 modified, added, clean = [], [], []
123 deleted, unknown, ignored = s[3], s[4], s[5]
128 deleted, unknown, ignored = s[3], s[4], s[5]
124 deletedset = set(deleted)
129 deletedset = set(deleted)
125 withflags = mf1.withflags() | mf2.withflags()
130 withflags = mf1.withflags() | mf2.withflags()
126 for fn, mf2node in mf2.iteritems():
131 for fn, mf2node in mf2.iteritems():
127 if fn in mf1:
132 if fn in mf1:
128 if (fn not in deletedset and
133 if (fn not in deletedset and
129 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
134 ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
130 (mf1[fn] != mf2node and
135 (mf1[fn] != mf2node and
131 (mf2node or self[fn].cmp(other[fn]))))):
136 (mf2node != _newnode or self[fn].cmp(other[fn]))))):
132 modified.append(fn)
137 modified.append(fn)
133 elif listclean:
138 elif listclean:
134 clean.append(fn)
139 clean.append(fn)
135 del mf1[fn]
140 del mf1[fn]
136 elif fn not in deletedset:
141 elif fn not in deletedset:
137 added.append(fn)
142 added.append(fn)
138 removed = mf1.keys()
143 removed = mf1.keys()
139 if removed:
144 if removed:
140 # need to filter files if they are already reported as removed
145 # need to filter files if they are already reported as removed
141 unknown = [fn for fn in unknown if fn not in mf1]
146 unknown = [fn for fn in unknown if fn not in mf1]
142 ignored = [fn for fn in ignored if fn not in mf1]
147 ignored = [fn for fn in ignored if fn not in mf1]
143
148
144 return [modified, added, removed, deleted, unknown, ignored, clean]
149 return [modified, added, removed, deleted, unknown, ignored, clean]
145
150
146 @propertycache
151 @propertycache
147 def substate(self):
152 def substate(self):
148 return subrepo.state(self, self._repo.ui)
153 return subrepo.state(self, self._repo.ui)
149
154
150 def subrev(self, subpath):
155 def subrev(self, subpath):
151 return self.substate[subpath][1]
156 return self.substate[subpath][1]
152
157
153 def rev(self):
158 def rev(self):
154 return self._rev
159 return self._rev
155 def node(self):
160 def node(self):
156 return self._node
161 return self._node
157 def hex(self):
162 def hex(self):
158 return hex(self.node())
163 return hex(self.node())
159 def manifest(self):
164 def manifest(self):
160 return self._manifest
165 return self._manifest
161 def phasestr(self):
166 def phasestr(self):
162 return phases.phasenames[self.phase()]
167 return phases.phasenames[self.phase()]
163 def mutable(self):
168 def mutable(self):
164 return self.phase() > phases.public
169 return self.phase() > phases.public
165
170
166 def getfileset(self, expr):
171 def getfileset(self, expr):
167 return fileset.getfileset(self, expr)
172 return fileset.getfileset(self, expr)
168
173
169 def obsolete(self):
174 def obsolete(self):
170 """True if the changeset is obsolete"""
175 """True if the changeset is obsolete"""
171 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
176 return self.rev() in obsmod.getrevs(self._repo, 'obsolete')
172
177
173 def extinct(self):
178 def extinct(self):
174 """True if the changeset is extinct"""
179 """True if the changeset is extinct"""
175 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
180 return self.rev() in obsmod.getrevs(self._repo, 'extinct')
176
181
177 def unstable(self):
182 def unstable(self):
178 """True if the changeset is not obsolete but it's ancestor are"""
183 """True if the changeset is not obsolete but it's ancestor are"""
179 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
184 return self.rev() in obsmod.getrevs(self._repo, 'unstable')
180
185
181 def bumped(self):
186 def bumped(self):
182 """True if the changeset try to be a successor of a public changeset
187 """True if the changeset try to be a successor of a public changeset
183
188
184 Only non-public and non-obsolete changesets may be bumped.
189 Only non-public and non-obsolete changesets may be bumped.
185 """
190 """
186 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
191 return self.rev() in obsmod.getrevs(self._repo, 'bumped')
187
192
188 def divergent(self):
193 def divergent(self):
189 """Is a successors of a changeset with multiple possible successors set
194 """Is a successors of a changeset with multiple possible successors set
190
195
191 Only non-public and non-obsolete changesets may be divergent.
196 Only non-public and non-obsolete changesets may be divergent.
192 """
197 """
193 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
198 return self.rev() in obsmod.getrevs(self._repo, 'divergent')
194
199
195 def troubled(self):
200 def troubled(self):
196 """True if the changeset is either unstable, bumped or divergent"""
201 """True if the changeset is either unstable, bumped or divergent"""
197 return self.unstable() or self.bumped() or self.divergent()
202 return self.unstable() or self.bumped() or self.divergent()
198
203
199 def troubles(self):
204 def troubles(self):
200 """return the list of troubles affecting this changesets.
205 """return the list of troubles affecting this changesets.
201
206
202 Troubles are returned as strings. Possible values are:
207 Troubles are returned as strings. Possible values are:
203 - unstable,
208 - unstable,
204 - bumped,
209 - bumped,
205 - divergent.
210 - divergent.
206 """
211 """
207 troubles = []
212 troubles = []
208 if self.unstable():
213 if self.unstable():
209 troubles.append('unstable')
214 troubles.append('unstable')
210 if self.bumped():
215 if self.bumped():
211 troubles.append('bumped')
216 troubles.append('bumped')
212 if self.divergent():
217 if self.divergent():
213 troubles.append('divergent')
218 troubles.append('divergent')
214 return troubles
219 return troubles
215
220
216 def parents(self):
221 def parents(self):
217 """return contexts for each parent changeset"""
222 """return contexts for each parent changeset"""
218 return self._parents
223 return self._parents
219
224
220 def p1(self):
225 def p1(self):
221 return self._parents[0]
226 return self._parents[0]
222
227
223 def p2(self):
228 def p2(self):
224 if len(self._parents) == 2:
229 if len(self._parents) == 2:
225 return self._parents[1]
230 return self._parents[1]
226 return changectx(self._repo, -1)
231 return changectx(self._repo, -1)
227
232
228 def _fileinfo(self, path):
233 def _fileinfo(self, path):
229 if '_manifest' in self.__dict__:
234 if '_manifest' in self.__dict__:
230 try:
235 try:
231 return self._manifest[path], self._manifest.flags(path)
236 return self._manifest[path], self._manifest.flags(path)
232 except KeyError:
237 except KeyError:
233 raise error.ManifestLookupError(self._node, path,
238 raise error.ManifestLookupError(self._node, path,
234 _('not found in manifest'))
239 _('not found in manifest'))
235 if '_manifestdelta' in self.__dict__ or path in self.files():
240 if '_manifestdelta' in self.__dict__ or path in self.files():
236 if path in self._manifestdelta:
241 if path in self._manifestdelta:
237 return (self._manifestdelta[path],
242 return (self._manifestdelta[path],
238 self._manifestdelta.flags(path))
243 self._manifestdelta.flags(path))
239 node, flag = self._repo.manifest.find(self._changeset[0], path)
244 node, flag = self._repo.manifest.find(self._changeset[0], path)
240 if not node:
245 if not node:
241 raise error.ManifestLookupError(self._node, path,
246 raise error.ManifestLookupError(self._node, path,
242 _('not found in manifest'))
247 _('not found in manifest'))
243
248
244 return node, flag
249 return node, flag
245
250
246 def filenode(self, path):
251 def filenode(self, path):
247 return self._fileinfo(path)[0]
252 return self._fileinfo(path)[0]
248
253
249 def flags(self, path):
254 def flags(self, path):
250 try:
255 try:
251 return self._fileinfo(path)[1]
256 return self._fileinfo(path)[1]
252 except error.LookupError:
257 except error.LookupError:
253 return ''
258 return ''
254
259
255 def sub(self, path):
260 def sub(self, path):
256 return subrepo.subrepo(self, path)
261 return subrepo.subrepo(self, path)
257
262
258 def match(self, pats=[], include=None, exclude=None, default='glob'):
263 def match(self, pats=[], include=None, exclude=None, default='glob'):
259 r = self._repo
264 r = self._repo
260 return matchmod.match(r.root, r.getcwd(), pats,
265 return matchmod.match(r.root, r.getcwd(), pats,
261 include, exclude, default,
266 include, exclude, default,
262 auditor=r.auditor, ctx=self)
267 auditor=r.auditor, ctx=self)
263
268
264 def diff(self, ctx2=None, match=None, **opts):
269 def diff(self, ctx2=None, match=None, **opts):
265 """Returns a diff generator for the given contexts and matcher"""
270 """Returns a diff generator for the given contexts and matcher"""
266 if ctx2 is None:
271 if ctx2 is None:
267 ctx2 = self.p1()
272 ctx2 = self.p1()
268 if ctx2 is not None:
273 if ctx2 is not None:
269 ctx2 = self._repo[ctx2]
274 ctx2 = self._repo[ctx2]
270 diffopts = patch.diffopts(self._repo.ui, opts)
275 diffopts = patch.diffopts(self._repo.ui, opts)
271 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
276 return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)
272
277
273 @propertycache
278 @propertycache
274 def _dirs(self):
279 def _dirs(self):
275 return scmutil.dirs(self._manifest)
280 return scmutil.dirs(self._manifest)
276
281
277 def dirs(self):
282 def dirs(self):
278 return self._dirs
283 return self._dirs
279
284
280 def dirty(self, missing=False, merge=True, branch=True):
285 def dirty(self, missing=False, merge=True, branch=True):
281 return False
286 return False
282
287
283 def status(self, other=None, match=None, listignored=False,
288 def status(self, other=None, match=None, listignored=False,
284 listclean=False, listunknown=False, listsubrepos=False):
289 listclean=False, listunknown=False, listsubrepos=False):
285 """return status of files between two nodes or node and working
290 """return status of files between two nodes or node and working
286 directory.
291 directory.
287
292
288 If other is None, compare this node with working directory.
293 If other is None, compare this node with working directory.
289
294
290 returns (modified, added, removed, deleted, unknown, ignored, clean)
295 returns (modified, added, removed, deleted, unknown, ignored, clean)
291 """
296 """
292
297
293 ctx1 = self
298 ctx1 = self
294 ctx2 = self._repo[other]
299 ctx2 = self._repo[other]
295
300
296 # This next code block is, admittedly, fragile logic that tests for
301 # This next code block is, admittedly, fragile logic that tests for
297 # reversing the contexts and wouldn't need to exist if it weren't for
302 # reversing the contexts and wouldn't need to exist if it weren't for
298 # the fast (and common) code path of comparing the working directory
303 # the fast (and common) code path of comparing the working directory
299 # with its first parent.
304 # with its first parent.
300 #
305 #
301 # What we're aiming for here is the ability to call:
306 # What we're aiming for here is the ability to call:
302 #
307 #
303 # workingctx.status(parentctx)
308 # workingctx.status(parentctx)
304 #
309 #
305 # If we always built the manifest for each context and compared those,
310 # If we always built the manifest for each context and compared those,
306 # then we'd be done. But the special case of the above call means we
311 # then we'd be done. But the special case of the above call means we
307 # just copy the manifest of the parent.
312 # just copy the manifest of the parent.
308 reversed = False
313 reversed = False
309 if (not isinstance(ctx1, changectx)
314 if (not isinstance(ctx1, changectx)
310 and isinstance(ctx2, changectx)):
315 and isinstance(ctx2, changectx)):
311 reversed = True
316 reversed = True
312 ctx1, ctx2 = ctx2, ctx1
317 ctx1, ctx2 = ctx2, ctx1
313
318
314 r = [[], [], [], [], [], [], []]
319 r = [[], [], [], [], [], [], []]
315 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
320 match = ctx2._matchstatus(ctx1, r, match, listignored, listclean,
316 listunknown)
321 listunknown)
317 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
322 r = ctx2._prestatus(ctx1, r, match, listignored, listclean, listunknown)
318 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
323 r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
319 listunknown)
324 listunknown)
320 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
325 r = ctx2._poststatus(ctx1, r, match, listignored, listclean,
321 listunknown)
326 listunknown)
322
327
323 if reversed:
328 if reversed:
324 # reverse added and removed
329 # reverse added and removed
325 r[1], r[2] = r[2], r[1]
330 r[1], r[2] = r[2], r[1]
326
331
327 if listsubrepos:
332 if listsubrepos:
328 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
333 for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
329 rev2 = ctx2.subrev(subpath)
334 rev2 = ctx2.subrev(subpath)
330 try:
335 try:
331 submatch = matchmod.narrowmatcher(subpath, match)
336 submatch = matchmod.narrowmatcher(subpath, match)
332 s = sub.status(rev2, match=submatch, ignored=listignored,
337 s = sub.status(rev2, match=submatch, ignored=listignored,
333 clean=listclean, unknown=listunknown,
338 clean=listclean, unknown=listunknown,
334 listsubrepos=True)
339 listsubrepos=True)
335 for rfiles, sfiles in zip(r, s):
340 for rfiles, sfiles in zip(r, s):
336 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
341 rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
337 except error.LookupError:
342 except error.LookupError:
338 self._repo.ui.status(_("skipping missing "
343 self._repo.ui.status(_("skipping missing "
339 "subrepository: %s\n") % subpath)
344 "subrepository: %s\n") % subpath)
340
345
341 for l in r:
346 for l in r:
342 l.sort()
347 l.sort()
343
348
344 # we return a tuple to signify that this list isn't changing
349 # we return a tuple to signify that this list isn't changing
345 return scmutil.status(*r)
350 return scmutil.status(*r)
346
351
347
352
348 def makememctx(repo, parents, text, user, date, branch, files, store,
353 def makememctx(repo, parents, text, user, date, branch, files, store,
349 editor=None):
354 editor=None):
350 def getfilectx(repo, memctx, path):
355 def getfilectx(repo, memctx, path):
351 data, mode, copied = store.getfile(path)
356 data, mode, copied = store.getfile(path)
352 if data is None:
357 if data is None:
353 return None
358 return None
354 islink, isexec = mode
359 islink, isexec = mode
355 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
360 return memfilectx(repo, path, data, islink=islink, isexec=isexec,
356 copied=copied, memctx=memctx)
361 copied=copied, memctx=memctx)
357 extra = {}
362 extra = {}
358 if branch:
363 if branch:
359 extra['branch'] = encoding.fromlocal(branch)
364 extra['branch'] = encoding.fromlocal(branch)
360 ctx = memctx(repo, parents, text, files, getfilectx, user,
365 ctx = memctx(repo, parents, text, files, getfilectx, user,
361 date, extra, editor)
366 date, extra, editor)
362 return ctx
367 return ctx
363
368
364 class changectx(basectx):
369 class changectx(basectx):
365 """A changecontext object makes access to data related to a particular
370 """A changecontext object makes access to data related to a particular
366 changeset convenient. It represents a read-only context already present in
371 changeset convenient. It represents a read-only context already present in
367 the repo."""
372 the repo."""
368 def __init__(self, repo, changeid=''):
373 def __init__(self, repo, changeid=''):
369 """changeid is a revision number, node, or tag"""
374 """changeid is a revision number, node, or tag"""
370
375
371 # since basectx.__new__ already took care of copying the object, we
376 # since basectx.__new__ already took care of copying the object, we
372 # don't need to do anything in __init__, so we just exit here
377 # don't need to do anything in __init__, so we just exit here
373 if isinstance(changeid, basectx):
378 if isinstance(changeid, basectx):
374 return
379 return
375
380
376 if changeid == '':
381 if changeid == '':
377 changeid = '.'
382 changeid = '.'
378 self._repo = repo
383 self._repo = repo
379
384
380 try:
385 try:
381 if isinstance(changeid, int):
386 if isinstance(changeid, int):
382 self._node = repo.changelog.node(changeid)
387 self._node = repo.changelog.node(changeid)
383 self._rev = changeid
388 self._rev = changeid
384 return
389 return
385 if isinstance(changeid, long):
390 if isinstance(changeid, long):
386 changeid = str(changeid)
391 changeid = str(changeid)
387 if changeid == '.':
392 if changeid == '.':
388 self._node = repo.dirstate.p1()
393 self._node = repo.dirstate.p1()
389 self._rev = repo.changelog.rev(self._node)
394 self._rev = repo.changelog.rev(self._node)
390 return
395 return
391 if changeid == 'null':
396 if changeid == 'null':
392 self._node = nullid
397 self._node = nullid
393 self._rev = nullrev
398 self._rev = nullrev
394 return
399 return
395 if changeid == 'tip':
400 if changeid == 'tip':
396 self._node = repo.changelog.tip()
401 self._node = repo.changelog.tip()
397 self._rev = repo.changelog.rev(self._node)
402 self._rev = repo.changelog.rev(self._node)
398 return
403 return
399 if len(changeid) == 20:
404 if len(changeid) == 20:
400 try:
405 try:
401 self._node = changeid
406 self._node = changeid
402 self._rev = repo.changelog.rev(changeid)
407 self._rev = repo.changelog.rev(changeid)
403 return
408 return
404 except error.FilteredRepoLookupError:
409 except error.FilteredRepoLookupError:
405 raise
410 raise
406 except LookupError:
411 except LookupError:
407 pass
412 pass
408
413
409 try:
414 try:
410 r = int(changeid)
415 r = int(changeid)
411 if str(r) != changeid:
416 if str(r) != changeid:
412 raise ValueError
417 raise ValueError
413 l = len(repo.changelog)
418 l = len(repo.changelog)
414 if r < 0:
419 if r < 0:
415 r += l
420 r += l
416 if r < 0 or r >= l:
421 if r < 0 or r >= l:
417 raise ValueError
422 raise ValueError
418 self._rev = r
423 self._rev = r
419 self._node = repo.changelog.node(r)
424 self._node = repo.changelog.node(r)
420 return
425 return
421 except error.FilteredIndexError:
426 except error.FilteredIndexError:
422 raise
427 raise
423 except (ValueError, OverflowError, IndexError):
428 except (ValueError, OverflowError, IndexError):
424 pass
429 pass
425
430
426 if len(changeid) == 40:
431 if len(changeid) == 40:
427 try:
432 try:
428 self._node = bin(changeid)
433 self._node = bin(changeid)
429 self._rev = repo.changelog.rev(self._node)
434 self._rev = repo.changelog.rev(self._node)
430 return
435 return
431 except error.FilteredLookupError:
436 except error.FilteredLookupError:
432 raise
437 raise
433 except (TypeError, LookupError):
438 except (TypeError, LookupError):
434 pass
439 pass
435
440
436 if changeid in repo._bookmarks:
441 if changeid in repo._bookmarks:
437 self._node = repo._bookmarks[changeid]
442 self._node = repo._bookmarks[changeid]
438 self._rev = repo.changelog.rev(self._node)
443 self._rev = repo.changelog.rev(self._node)
439 return
444 return
440 if changeid in repo._tagscache.tags:
445 if changeid in repo._tagscache.tags:
441 self._node = repo._tagscache.tags[changeid]
446 self._node = repo._tagscache.tags[changeid]
442 self._rev = repo.changelog.rev(self._node)
447 self._rev = repo.changelog.rev(self._node)
443 return
448 return
444 try:
449 try:
445 self._node = repo.branchtip(changeid)
450 self._node = repo.branchtip(changeid)
446 self._rev = repo.changelog.rev(self._node)
451 self._rev = repo.changelog.rev(self._node)
447 return
452 return
448 except error.FilteredRepoLookupError:
453 except error.FilteredRepoLookupError:
449 raise
454 raise
450 except error.RepoLookupError:
455 except error.RepoLookupError:
451 pass
456 pass
452
457
453 self._node = repo.unfiltered().changelog._partialmatch(changeid)
458 self._node = repo.unfiltered().changelog._partialmatch(changeid)
454 if self._node is not None:
459 if self._node is not None:
455 self._rev = repo.changelog.rev(self._node)
460 self._rev = repo.changelog.rev(self._node)
456 return
461 return
457
462
458 # lookup failed
463 # lookup failed
459 # check if it might have come from damaged dirstate
464 # check if it might have come from damaged dirstate
460 #
465 #
461 # XXX we could avoid the unfiltered if we had a recognizable
466 # XXX we could avoid the unfiltered if we had a recognizable
462 # exception for filtered changeset access
467 # exception for filtered changeset access
463 if changeid in repo.unfiltered().dirstate.parents():
468 if changeid in repo.unfiltered().dirstate.parents():
464 msg = _("working directory has unknown parent '%s'!")
469 msg = _("working directory has unknown parent '%s'!")
465 raise error.Abort(msg % short(changeid))
470 raise error.Abort(msg % short(changeid))
466 try:
471 try:
467 if len(changeid) == 20:
472 if len(changeid) == 20:
468 changeid = hex(changeid)
473 changeid = hex(changeid)
469 except TypeError:
474 except TypeError:
470 pass
475 pass
471 except (error.FilteredIndexError, error.FilteredLookupError,
476 except (error.FilteredIndexError, error.FilteredLookupError,
472 error.FilteredRepoLookupError):
477 error.FilteredRepoLookupError):
473 if repo.filtername == 'visible':
478 if repo.filtername == 'visible':
474 msg = _("hidden revision '%s'") % changeid
479 msg = _("hidden revision '%s'") % changeid
475 hint = _('use --hidden to access hidden revisions')
480 hint = _('use --hidden to access hidden revisions')
476 raise error.FilteredRepoLookupError(msg, hint=hint)
481 raise error.FilteredRepoLookupError(msg, hint=hint)
477 msg = _("filtered revision '%s' (not in '%s' subset)")
482 msg = _("filtered revision '%s' (not in '%s' subset)")
478 msg %= (changeid, repo.filtername)
483 msg %= (changeid, repo.filtername)
479 raise error.FilteredRepoLookupError(msg)
484 raise error.FilteredRepoLookupError(msg)
480 except IndexError:
485 except IndexError:
481 pass
486 pass
482 raise error.RepoLookupError(
487 raise error.RepoLookupError(
483 _("unknown revision '%s'") % changeid)
488 _("unknown revision '%s'") % changeid)
484
489
485 def __hash__(self):
490 def __hash__(self):
486 try:
491 try:
487 return hash(self._rev)
492 return hash(self._rev)
488 except AttributeError:
493 except AttributeError:
489 return id(self)
494 return id(self)
490
495
491 def __nonzero__(self):
496 def __nonzero__(self):
492 return self._rev != nullrev
497 return self._rev != nullrev
493
498
494 @propertycache
499 @propertycache
495 def _changeset(self):
500 def _changeset(self):
496 return self._repo.changelog.read(self.rev())
501 return self._repo.changelog.read(self.rev())
497
502
498 @propertycache
503 @propertycache
499 def _manifest(self):
504 def _manifest(self):
500 return self._repo.manifest.read(self._changeset[0])
505 return self._repo.manifest.read(self._changeset[0])
501
506
502 @propertycache
507 @propertycache
503 def _manifestdelta(self):
508 def _manifestdelta(self):
504 return self._repo.manifest.readdelta(self._changeset[0])
509 return self._repo.manifest.readdelta(self._changeset[0])
505
510
506 @propertycache
511 @propertycache
507 def _parents(self):
512 def _parents(self):
508 p = self._repo.changelog.parentrevs(self._rev)
513 p = self._repo.changelog.parentrevs(self._rev)
509 if p[1] == nullrev:
514 if p[1] == nullrev:
510 p = p[:-1]
515 p = p[:-1]
511 return [changectx(self._repo, x) for x in p]
516 return [changectx(self._repo, x) for x in p]
512
517
513 def changeset(self):
518 def changeset(self):
514 return self._changeset
519 return self._changeset
515 def manifestnode(self):
520 def manifestnode(self):
516 return self._changeset[0]
521 return self._changeset[0]
517
522
518 def user(self):
523 def user(self):
519 return self._changeset[1]
524 return self._changeset[1]
520 def date(self):
525 def date(self):
521 return self._changeset[2]
526 return self._changeset[2]
522 def files(self):
527 def files(self):
523 return self._changeset[3]
528 return self._changeset[3]
524 def description(self):
529 def description(self):
525 return self._changeset[4]
530 return self._changeset[4]
526 def branch(self):
531 def branch(self):
527 return encoding.tolocal(self._changeset[5].get("branch"))
532 return encoding.tolocal(self._changeset[5].get("branch"))
528 def closesbranch(self):
533 def closesbranch(self):
529 return 'close' in self._changeset[5]
534 return 'close' in self._changeset[5]
530 def extra(self):
535 def extra(self):
531 return self._changeset[5]
536 return self._changeset[5]
532 def tags(self):
537 def tags(self):
533 return self._repo.nodetags(self._node)
538 return self._repo.nodetags(self._node)
534 def bookmarks(self):
539 def bookmarks(self):
535 return self._repo.nodebookmarks(self._node)
540 return self._repo.nodebookmarks(self._node)
536 def phase(self):
541 def phase(self):
537 return self._repo._phasecache.phase(self._repo, self._rev)
542 return self._repo._phasecache.phase(self._repo, self._rev)
538 def hidden(self):
543 def hidden(self):
539 return self._rev in repoview.filterrevs(self._repo, 'visible')
544 return self._rev in repoview.filterrevs(self._repo, 'visible')
540
545
541 def children(self):
546 def children(self):
542 """return contexts for each child changeset"""
547 """return contexts for each child changeset"""
543 c = self._repo.changelog.children(self._node)
548 c = self._repo.changelog.children(self._node)
544 return [changectx(self._repo, x) for x in c]
549 return [changectx(self._repo, x) for x in c]
545
550
546 def ancestors(self):
551 def ancestors(self):
547 for a in self._repo.changelog.ancestors([self._rev]):
552 for a in self._repo.changelog.ancestors([self._rev]):
548 yield changectx(self._repo, a)
553 yield changectx(self._repo, a)
549
554
550 def descendants(self):
555 def descendants(self):
551 for d in self._repo.changelog.descendants([self._rev]):
556 for d in self._repo.changelog.descendants([self._rev]):
552 yield changectx(self._repo, d)
557 yield changectx(self._repo, d)
553
558
554 def filectx(self, path, fileid=None, filelog=None):
559 def filectx(self, path, fileid=None, filelog=None):
555 """get a file context from this changeset"""
560 """get a file context from this changeset"""
556 if fileid is None:
561 if fileid is None:
557 fileid = self.filenode(path)
562 fileid = self.filenode(path)
558 return filectx(self._repo, path, fileid=fileid,
563 return filectx(self._repo, path, fileid=fileid,
559 changectx=self, filelog=filelog)
564 changectx=self, filelog=filelog)
560
565
561 def ancestor(self, c2, warn=False):
566 def ancestor(self, c2, warn=False):
562 """return the "best" ancestor context of self and c2
567 """return the "best" ancestor context of self and c2
563
568
564 If there are multiple candidates, it will show a message and check
569 If there are multiple candidates, it will show a message and check
565 merge.preferancestor configuration before falling back to the
570 merge.preferancestor configuration before falling back to the
566 revlog ancestor."""
571 revlog ancestor."""
567 # deal with workingctxs
572 # deal with workingctxs
568 n2 = c2._node
573 n2 = c2._node
569 if n2 is None:
574 if n2 is None:
570 n2 = c2._parents[0]._node
575 n2 = c2._parents[0]._node
571 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
576 cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
572 if not cahs:
577 if not cahs:
573 anc = nullid
578 anc = nullid
574 elif len(cahs) == 1:
579 elif len(cahs) == 1:
575 anc = cahs[0]
580 anc = cahs[0]
576 else:
581 else:
577 for r in self._repo.ui.configlist('merge', 'preferancestor'):
582 for r in self._repo.ui.configlist('merge', 'preferancestor'):
578 try:
583 try:
579 ctx = changectx(self._repo, r)
584 ctx = changectx(self._repo, r)
580 except error.RepoLookupError:
585 except error.RepoLookupError:
581 continue
586 continue
582 anc = ctx.node()
587 anc = ctx.node()
583 if anc in cahs:
588 if anc in cahs:
584 break
589 break
585 else:
590 else:
586 anc = self._repo.changelog.ancestor(self._node, n2)
591 anc = self._repo.changelog.ancestor(self._node, n2)
587 if warn:
592 if warn:
588 self._repo.ui.status(
593 self._repo.ui.status(
589 (_("note: using %s as ancestor of %s and %s\n") %
594 (_("note: using %s as ancestor of %s and %s\n") %
590 (short(anc), short(self._node), short(n2))) +
595 (short(anc), short(self._node), short(n2))) +
591 ''.join(_(" alternatively, use --config "
596 ''.join(_(" alternatively, use --config "
592 "merge.preferancestor=%s\n") %
597 "merge.preferancestor=%s\n") %
593 short(n) for n in sorted(cahs) if n != anc))
598 short(n) for n in sorted(cahs) if n != anc))
594 return changectx(self._repo, anc)
599 return changectx(self._repo, anc)
595
600
596 def descendant(self, other):
601 def descendant(self, other):
597 """True if other is descendant of this changeset"""
602 """True if other is descendant of this changeset"""
598 return self._repo.changelog.descendant(self._rev, other._rev)
603 return self._repo.changelog.descendant(self._rev, other._rev)
599
604
600 def walk(self, match):
605 def walk(self, match):
601 fset = set(match.files())
606 fset = set(match.files())
602 # for dirstate.walk, files=['.'] means "walk the whole tree".
607 # for dirstate.walk, files=['.'] means "walk the whole tree".
603 # follow that here, too
608 # follow that here, too
604 fset.discard('.')
609 fset.discard('.')
605
610
606 # avoid the entire walk if we're only looking for specific files
611 # avoid the entire walk if we're only looking for specific files
607 if fset and not match.anypats():
612 if fset and not match.anypats():
608 if util.all([fn in self for fn in fset]):
613 if util.all([fn in self for fn in fset]):
609 for fn in sorted(fset):
614 for fn in sorted(fset):
610 if match(fn):
615 if match(fn):
611 yield fn
616 yield fn
612 raise StopIteration
617 raise StopIteration
613
618
614 for fn in self:
619 for fn in self:
615 if fn in fset:
620 if fn in fset:
616 # specified pattern is the exact name
621 # specified pattern is the exact name
617 fset.remove(fn)
622 fset.remove(fn)
618 if match(fn):
623 if match(fn):
619 yield fn
624 yield fn
620 for fn in sorted(fset):
625 for fn in sorted(fset):
621 if fn in self._dirs:
626 if fn in self._dirs:
622 # specified pattern is a directory
627 # specified pattern is a directory
623 continue
628 continue
624 match.bad(fn, _('no such file in rev %s') % self)
629 match.bad(fn, _('no such file in rev %s') % self)
625
630
626 def matches(self, match):
631 def matches(self, match):
627 return self.walk(match)
632 return self.walk(match)
628
633
629 class basefilectx(object):
634 class basefilectx(object):
630 """A filecontext object represents the common logic for its children:
635 """A filecontext object represents the common logic for its children:
631 filectx: read-only access to a filerevision that is already present
636 filectx: read-only access to a filerevision that is already present
632 in the repo,
637 in the repo,
633 workingfilectx: a filecontext that represents files from the working
638 workingfilectx: a filecontext that represents files from the working
634 directory,
639 directory,
635 memfilectx: a filecontext that represents files in-memory."""
640 memfilectx: a filecontext that represents files in-memory."""
636 def __new__(cls, repo, path, *args, **kwargs):
641 def __new__(cls, repo, path, *args, **kwargs):
637 return super(basefilectx, cls).__new__(cls)
642 return super(basefilectx, cls).__new__(cls)
638
643
639 @propertycache
644 @propertycache
640 def _filelog(self):
645 def _filelog(self):
641 return self._repo.file(self._path)
646 return self._repo.file(self._path)
642
647
643 @propertycache
648 @propertycache
644 def _changeid(self):
649 def _changeid(self):
645 if '_changeid' in self.__dict__:
650 if '_changeid' in self.__dict__:
646 return self._changeid
651 return self._changeid
647 elif '_changectx' in self.__dict__:
652 elif '_changectx' in self.__dict__:
648 return self._changectx.rev()
653 return self._changectx.rev()
649 else:
654 else:
650 return self._filelog.linkrev(self._filerev)
655 return self._filelog.linkrev(self._filerev)
651
656
652 @propertycache
657 @propertycache
653 def _filenode(self):
658 def _filenode(self):
654 if '_fileid' in self.__dict__:
659 if '_fileid' in self.__dict__:
655 return self._filelog.lookup(self._fileid)
660 return self._filelog.lookup(self._fileid)
656 else:
661 else:
657 return self._changectx.filenode(self._path)
662 return self._changectx.filenode(self._path)
658
663
659 @propertycache
664 @propertycache
660 def _filerev(self):
665 def _filerev(self):
661 return self._filelog.rev(self._filenode)
666 return self._filelog.rev(self._filenode)
662
667
663 @propertycache
668 @propertycache
664 def _repopath(self):
669 def _repopath(self):
665 return self._path
670 return self._path
666
671
667 def __nonzero__(self):
672 def __nonzero__(self):
668 try:
673 try:
669 self._filenode
674 self._filenode
670 return True
675 return True
671 except error.LookupError:
676 except error.LookupError:
672 # file is missing
677 # file is missing
673 return False
678 return False
674
679
675 def __str__(self):
680 def __str__(self):
676 return "%s@%s" % (self.path(), self._changectx)
681 return "%s@%s" % (self.path(), self._changectx)
677
682
678 def __repr__(self):
683 def __repr__(self):
679 return "<%s %s>" % (type(self).__name__, str(self))
684 return "<%s %s>" % (type(self).__name__, str(self))
680
685
681 def __hash__(self):
686 def __hash__(self):
682 try:
687 try:
683 return hash((self._path, self._filenode))
688 return hash((self._path, self._filenode))
684 except AttributeError:
689 except AttributeError:
685 return id(self)
690 return id(self)
686
691
687 def __eq__(self, other):
692 def __eq__(self, other):
688 try:
693 try:
689 return (type(self) == type(other) and self._path == other._path
694 return (type(self) == type(other) and self._path == other._path
690 and self._filenode == other._filenode)
695 and self._filenode == other._filenode)
691 except AttributeError:
696 except AttributeError:
692 return False
697 return False
693
698
694 def __ne__(self, other):
699 def __ne__(self, other):
695 return not (self == other)
700 return not (self == other)
696
701
697 def filerev(self):
702 def filerev(self):
698 return self._filerev
703 return self._filerev
699 def filenode(self):
704 def filenode(self):
700 return self._filenode
705 return self._filenode
701 def flags(self):
706 def flags(self):
702 return self._changectx.flags(self._path)
707 return self._changectx.flags(self._path)
703 def filelog(self):
708 def filelog(self):
704 return self._filelog
709 return self._filelog
705 def rev(self):
710 def rev(self):
706 return self._changeid
711 return self._changeid
707 def linkrev(self):
712 def linkrev(self):
708 return self._filelog.linkrev(self._filerev)
713 return self._filelog.linkrev(self._filerev)
709 def node(self):
714 def node(self):
710 return self._changectx.node()
715 return self._changectx.node()
711 def hex(self):
716 def hex(self):
712 return self._changectx.hex()
717 return self._changectx.hex()
713 def user(self):
718 def user(self):
714 return self._changectx.user()
719 return self._changectx.user()
715 def date(self):
720 def date(self):
716 return self._changectx.date()
721 return self._changectx.date()
717 def files(self):
722 def files(self):
718 return self._changectx.files()
723 return self._changectx.files()
719 def description(self):
724 def description(self):
720 return self._changectx.description()
725 return self._changectx.description()
721 def branch(self):
726 def branch(self):
722 return self._changectx.branch()
727 return self._changectx.branch()
723 def extra(self):
728 def extra(self):
724 return self._changectx.extra()
729 return self._changectx.extra()
725 def phase(self):
730 def phase(self):
726 return self._changectx.phase()
731 return self._changectx.phase()
727 def phasestr(self):
732 def phasestr(self):
728 return self._changectx.phasestr()
733 return self._changectx.phasestr()
729 def manifest(self):
734 def manifest(self):
730 return self._changectx.manifest()
735 return self._changectx.manifest()
731 def changectx(self):
736 def changectx(self):
732 return self._changectx
737 return self._changectx
733
738
734 def path(self):
739 def path(self):
735 return self._path
740 return self._path
736
741
737 def isbinary(self):
742 def isbinary(self):
738 try:
743 try:
739 return util.binary(self.data())
744 return util.binary(self.data())
740 except IOError:
745 except IOError:
741 return False
746 return False
742 def isexec(self):
747 def isexec(self):
743 return 'x' in self.flags()
748 return 'x' in self.flags()
744 def islink(self):
749 def islink(self):
745 return 'l' in self.flags()
750 return 'l' in self.flags()
746
751
747 def cmp(self, fctx):
752 def cmp(self, fctx):
748 """compare with other file context
753 """compare with other file context
749
754
750 returns True if different than fctx.
755 returns True if different than fctx.
751 """
756 """
752 if (fctx._filerev is None
757 if (fctx._filerev is None
753 and (self._repo._encodefilterpats
758 and (self._repo._encodefilterpats
754 # if file data starts with '\1\n', empty metadata block is
759 # if file data starts with '\1\n', empty metadata block is
755 # prepended, which adds 4 bytes to filelog.size().
760 # prepended, which adds 4 bytes to filelog.size().
756 or self.size() - 4 == fctx.size())
761 or self.size() - 4 == fctx.size())
757 or self.size() == fctx.size()):
762 or self.size() == fctx.size()):
758 return self._filelog.cmp(self._filenode, fctx.data())
763 return self._filelog.cmp(self._filenode, fctx.data())
759
764
760 return True
765 return True
761
766
762 def parents(self):
767 def parents(self):
763 _path = self._path
768 _path = self._path
764 fl = self._filelog
769 fl = self._filelog
765 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
770 pl = [(_path, n, fl) for n in self._filelog.parents(self._filenode)]
766
771
767 r = self._filelog.renamed(self._filenode)
772 r = self._filelog.renamed(self._filenode)
768 if r:
773 if r:
769 pl[0] = (r[0], r[1], None)
774 pl[0] = (r[0], r[1], None)
770
775
771 return [filectx(self._repo, p, fileid=n, filelog=l)
776 return [filectx(self._repo, p, fileid=n, filelog=l)
772 for p, n, l in pl if n != nullid]
777 for p, n, l in pl if n != nullid]
773
778
774 def p1(self):
779 def p1(self):
775 return self.parents()[0]
780 return self.parents()[0]
776
781
777 def p2(self):
782 def p2(self):
778 p = self.parents()
783 p = self.parents()
779 if len(p) == 2:
784 if len(p) == 2:
780 return p[1]
785 return p[1]
781 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
786 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
782
787
783 def annotate(self, follow=False, linenumber=None, diffopts=None):
788 def annotate(self, follow=False, linenumber=None, diffopts=None):
784 '''returns a list of tuples of (ctx, line) for each line
789 '''returns a list of tuples of (ctx, line) for each line
785 in the file, where ctx is the filectx of the node where
790 in the file, where ctx is the filectx of the node where
786 that line was last changed.
791 that line was last changed.
787 This returns tuples of ((ctx, linenumber), line) for each line,
792 This returns tuples of ((ctx, linenumber), line) for each line,
788 if "linenumber" parameter is NOT "None".
793 if "linenumber" parameter is NOT "None".
789 In such tuples, linenumber means one at the first appearance
794 In such tuples, linenumber means one at the first appearance
790 in the managed file.
795 in the managed file.
791 To reduce annotation cost,
796 To reduce annotation cost,
792 this returns a fixed value (False is used) as linenumber,
797 this returns a fixed value (False is used) as linenumber,
793 if "linenumber" parameter is "False".'''
798 if "linenumber" parameter is "False".'''
794
799
795 if linenumber is None:
800 if linenumber is None:
796 def decorate(text, rev):
801 def decorate(text, rev):
797 return ([rev] * len(text.splitlines()), text)
802 return ([rev] * len(text.splitlines()), text)
798 elif linenumber:
803 elif linenumber:
799 def decorate(text, rev):
804 def decorate(text, rev):
800 size = len(text.splitlines())
805 size = len(text.splitlines())
801 return ([(rev, i) for i in xrange(1, size + 1)], text)
806 return ([(rev, i) for i in xrange(1, size + 1)], text)
802 else:
807 else:
803 def decorate(text, rev):
808 def decorate(text, rev):
804 return ([(rev, False)] * len(text.splitlines()), text)
809 return ([(rev, False)] * len(text.splitlines()), text)
805
810
806 def pair(parent, child):
811 def pair(parent, child):
807 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
812 blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
808 refine=True)
813 refine=True)
809 for (a1, a2, b1, b2), t in blocks:
814 for (a1, a2, b1, b2), t in blocks:
810 # Changed blocks ('!') or blocks made only of blank lines ('~')
815 # Changed blocks ('!') or blocks made only of blank lines ('~')
811 # belong to the child.
816 # belong to the child.
812 if t == '=':
817 if t == '=':
813 child[0][b1:b2] = parent[0][a1:a2]
818 child[0][b1:b2] = parent[0][a1:a2]
814 return child
819 return child
815
820
816 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
821 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
817
822
818 def parents(f):
823 def parents(f):
819 pl = f.parents()
824 pl = f.parents()
820
825
821 # Don't return renamed parents if we aren't following.
826 # Don't return renamed parents if we aren't following.
822 if not follow:
827 if not follow:
823 pl = [p for p in pl if p.path() == f.path()]
828 pl = [p for p in pl if p.path() == f.path()]
824
829
825 # renamed filectx won't have a filelog yet, so set it
830 # renamed filectx won't have a filelog yet, so set it
826 # from the cache to save time
831 # from the cache to save time
827 for p in pl:
832 for p in pl:
828 if not '_filelog' in p.__dict__:
833 if not '_filelog' in p.__dict__:
829 p._filelog = getlog(p.path())
834 p._filelog = getlog(p.path())
830
835
831 return pl
836 return pl
832
837
833 # use linkrev to find the first changeset where self appeared
838 # use linkrev to find the first changeset where self appeared
834 if self.rev() != self.linkrev():
839 if self.rev() != self.linkrev():
835 base = self.filectx(self.filenode())
840 base = self.filectx(self.filenode())
836 else:
841 else:
837 base = self
842 base = self
838
843
839 # This algorithm would prefer to be recursive, but Python is a
844 # This algorithm would prefer to be recursive, but Python is a
840 # bit recursion-hostile. Instead we do an iterative
845 # bit recursion-hostile. Instead we do an iterative
841 # depth-first search.
846 # depth-first search.
842
847
843 visit = [base]
848 visit = [base]
844 hist = {}
849 hist = {}
845 pcache = {}
850 pcache = {}
846 needed = {base: 1}
851 needed = {base: 1}
847 while visit:
852 while visit:
848 f = visit[-1]
853 f = visit[-1]
849 pcached = f in pcache
854 pcached = f in pcache
850 if not pcached:
855 if not pcached:
851 pcache[f] = parents(f)
856 pcache[f] = parents(f)
852
857
853 ready = True
858 ready = True
854 pl = pcache[f]
859 pl = pcache[f]
855 for p in pl:
860 for p in pl:
856 if p not in hist:
861 if p not in hist:
857 ready = False
862 ready = False
858 visit.append(p)
863 visit.append(p)
859 if not pcached:
864 if not pcached:
860 needed[p] = needed.get(p, 0) + 1
865 needed[p] = needed.get(p, 0) + 1
861 if ready:
866 if ready:
862 visit.pop()
867 visit.pop()
863 reusable = f in hist
868 reusable = f in hist
864 if reusable:
869 if reusable:
865 curr = hist[f]
870 curr = hist[f]
866 else:
871 else:
867 curr = decorate(f.data(), f)
872 curr = decorate(f.data(), f)
868 for p in pl:
873 for p in pl:
869 if not reusable:
874 if not reusable:
870 curr = pair(hist[p], curr)
875 curr = pair(hist[p], curr)
871 if needed[p] == 1:
876 if needed[p] == 1:
872 del hist[p]
877 del hist[p]
873 del needed[p]
878 del needed[p]
874 else:
879 else:
875 needed[p] -= 1
880 needed[p] -= 1
876
881
877 hist[f] = curr
882 hist[f] = curr
878 pcache[f] = []
883 pcache[f] = []
879
884
880 return zip(hist[base][0], hist[base][1].splitlines(True))
885 return zip(hist[base][0], hist[base][1].splitlines(True))
881
886
882 def ancestors(self, followfirst=False):
887 def ancestors(self, followfirst=False):
883 visit = {}
888 visit = {}
884 c = self
889 c = self
885 cut = followfirst and 1 or None
890 cut = followfirst and 1 or None
886 while True:
891 while True:
887 for parent in c.parents()[:cut]:
892 for parent in c.parents()[:cut]:
888 visit[(parent.rev(), parent.node())] = parent
893 visit[(parent.rev(), parent.node())] = parent
889 if not visit:
894 if not visit:
890 break
895 break
891 c = visit.pop(max(visit))
896 c = visit.pop(max(visit))
892 yield c
897 yield c
893
898
894 class filectx(basefilectx):
899 class filectx(basefilectx):
895 """A filecontext object makes access to data related to a particular
900 """A filecontext object makes access to data related to a particular
896 filerevision convenient."""
901 filerevision convenient."""
897 def __init__(self, repo, path, changeid=None, fileid=None,
902 def __init__(self, repo, path, changeid=None, fileid=None,
898 filelog=None, changectx=None):
903 filelog=None, changectx=None):
899 """changeid can be a changeset revision, node, or tag.
904 """changeid can be a changeset revision, node, or tag.
900 fileid can be a file revision or node."""
905 fileid can be a file revision or node."""
901 self._repo = repo
906 self._repo = repo
902 self._path = path
907 self._path = path
903
908
904 assert (changeid is not None
909 assert (changeid is not None
905 or fileid is not None
910 or fileid is not None
906 or changectx is not None), \
911 or changectx is not None), \
907 ("bad args: changeid=%r, fileid=%r, changectx=%r"
912 ("bad args: changeid=%r, fileid=%r, changectx=%r"
908 % (changeid, fileid, changectx))
913 % (changeid, fileid, changectx))
909
914
910 if filelog is not None:
915 if filelog is not None:
911 self._filelog = filelog
916 self._filelog = filelog
912
917
913 if changeid is not None:
918 if changeid is not None:
914 self._changeid = changeid
919 self._changeid = changeid
915 if changectx is not None:
920 if changectx is not None:
916 self._changectx = changectx
921 self._changectx = changectx
917 if fileid is not None:
922 if fileid is not None:
918 self._fileid = fileid
923 self._fileid = fileid
919
924
920 @propertycache
925 @propertycache
921 def _changectx(self):
926 def _changectx(self):
922 try:
927 try:
923 return changectx(self._repo, self._changeid)
928 return changectx(self._repo, self._changeid)
924 except error.RepoLookupError:
929 except error.RepoLookupError:
925 # Linkrev may point to any revision in the repository. When the
930 # Linkrev may point to any revision in the repository. When the
926 # repository is filtered this may lead to `filectx` trying to build
931 # repository is filtered this may lead to `filectx` trying to build
927 # `changectx` for a filtered revision. In such a case we fall back to
932 # `changectx` for a filtered revision. In such a case we fall back to
928 # creating `changectx` on the unfiltered version of the repository.
933 # creating `changectx` on the unfiltered version of the repository.
929 # This fallback should not be an issue because `changectx` from
934 # This fallback should not be an issue because `changectx` from
930 # `filectx` are not used in complex operations that care about
935 # `filectx` are not used in complex operations that care about
931 # filtering.
936 # filtering.
932 #
937 #
933 # This fallback is a cheap and dirty fix that prevents several
938 # This fallback is a cheap and dirty fix that prevents several
934 # crashes. It does not ensure the behavior is correct. However the
939 # crashes. It does not ensure the behavior is correct. However the
935 # behavior was not correct before filtering either and "incorrect
940 # behavior was not correct before filtering either and "incorrect
936 # behavior" is seen as better as "crash"
941 # behavior" is seen as better as "crash"
937 #
942 #
938 # Linkrevs have several serious troubles with filtering that are
943 # Linkrevs have several serious troubles with filtering that are
939 # complicated to solve. Proper handling of the issue here should be
944 # complicated to solve. Proper handling of the issue here should be
940 # considered when solving the linkrev issues is on the table.
945 # considered when solving the linkrev issues is on the table.
941 return changectx(self._repo.unfiltered(), self._changeid)
946 return changectx(self._repo.unfiltered(), self._changeid)
942
947
943 def filectx(self, fileid):
948 def filectx(self, fileid):
944 '''opens an arbitrary revision of the file without
949 '''opens an arbitrary revision of the file without
945 opening a new filelog'''
950 opening a new filelog'''
946 return filectx(self._repo, self._path, fileid=fileid,
951 return filectx(self._repo, self._path, fileid=fileid,
947 filelog=self._filelog)
952 filelog=self._filelog)
948
953
949 def data(self):
954 def data(self):
950 try:
955 try:
951 return self._filelog.read(self._filenode)
956 return self._filelog.read(self._filenode)
952 except error.CensoredNodeError:
957 except error.CensoredNodeError:
953 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
958 if self._repo.ui.config("censor", "policy", "abort") == "ignore":
954 return ""
959 return ""
955 raise util.Abort(_("censored node: %s") % short(self._filenode),
960 raise util.Abort(_("censored node: %s") % short(self._filenode),
956 hint=_("set censor.policy to ignore errors"))
961 hint=_("set censor.policy to ignore errors"))
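# A minimal hgrc sketch matching the hint above; with this setting, data()
# returns an empty string for censored nodes instead of aborting:
#
#   [censor]
#   policy = ignore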
957
962
958 def size(self):
963 def size(self):
959 return self._filelog.size(self._filerev)
964 return self._filelog.size(self._filerev)
960
965
961 def renamed(self):
966 def renamed(self):
962 """check if file was actually renamed in this changeset revision
967 """check if file was actually renamed in this changeset revision
963
968
964 If a rename is logged in the file revision, we report the copy for the changeset only
969 If a rename is logged in the file revision, we report the copy for the changeset only
965 if the file revision's linkrev points back to the changeset in question
970 if the file revision's linkrev points back to the changeset in question
966 or both changeset parents contain different file revisions.
971 or both changeset parents contain different file revisions.
967 """
972 """
968
973
969 renamed = self._filelog.renamed(self._filenode)
974 renamed = self._filelog.renamed(self._filenode)
970 if not renamed:
975 if not renamed:
971 return renamed
976 return renamed
972
977
973 if self.rev() == self.linkrev():
978 if self.rev() == self.linkrev():
974 return renamed
979 return renamed
975
980
976 name = self.path()
981 name = self.path()
977 fnode = self._filenode
982 fnode = self._filenode
978 for p in self._changectx.parents():
983 for p in self._changectx.parents():
979 try:
984 try:
980 if fnode == p.filenode(name):
985 if fnode == p.filenode(name):
981 return None
986 return None
982 except error.LookupError:
987 except error.LookupError:
983 pass
988 pass
984 return renamed
989 return renamed
985
990
986 def children(self):
991 def children(self):
987 # hard for renames
992 # hard for renames
988 c = self._filelog.children(self._filenode)
993 c = self._filelog.children(self._filenode)
989 return [filectx(self._repo, self._path, fileid=x,
994 return [filectx(self._repo, self._path, fileid=x,
990 filelog=self._filelog) for x in c]
995 filelog=self._filelog) for x in c]
991
996
992 class committablectx(basectx):
997 class committablectx(basectx):
993 """A committablectx object provides common functionality for a context that
998 """A committablectx object provides common functionality for a context that
994 wants the ability to commit, e.g. workingctx or memctx."""
999 wants the ability to commit, e.g. workingctx or memctx."""
995 def __init__(self, repo, text="", user=None, date=None, extra=None,
1000 def __init__(self, repo, text="", user=None, date=None, extra=None,
996 changes=None):
1001 changes=None):
997 self._repo = repo
1002 self._repo = repo
998 self._rev = None
1003 self._rev = None
999 self._node = None
1004 self._node = None
1000 self._text = text
1005 self._text = text
1001 if date:
1006 if date:
1002 self._date = util.parsedate(date)
1007 self._date = util.parsedate(date)
1003 if user:
1008 if user:
1004 self._user = user
1009 self._user = user
1005 if changes:
1010 if changes:
1006 self._status = changes
1011 self._status = changes
1007
1012
1008 self._extra = {}
1013 self._extra = {}
1009 if extra:
1014 if extra:
1010 self._extra = extra.copy()
1015 self._extra = extra.copy()
1011 if 'branch' not in self._extra:
1016 if 'branch' not in self._extra:
1012 try:
1017 try:
1013 branch = encoding.fromlocal(self._repo.dirstate.branch())
1018 branch = encoding.fromlocal(self._repo.dirstate.branch())
1014 except UnicodeDecodeError:
1019 except UnicodeDecodeError:
1015 raise util.Abort(_('branch name not in UTF-8!'))
1020 raise util.Abort(_('branch name not in UTF-8!'))
1016 self._extra['branch'] = branch
1021 self._extra['branch'] = branch
1017 if self._extra['branch'] == '':
1022 if self._extra['branch'] == '':
1018 self._extra['branch'] = 'default'
1023 self._extra['branch'] = 'default'
1019
1024
1020 def __str__(self):
1025 def __str__(self):
1021 return str(self._parents[0]) + "+"
1026 return str(self._parents[0]) + "+"
1022
1027
1023 def __nonzero__(self):
1028 def __nonzero__(self):
1024 return True
1029 return True
1025
1030
1026 def _buildflagfunc(self):
1031 def _buildflagfunc(self):
1027 # Create a fallback function for getting file flags when the
1032 # Create a fallback function for getting file flags when the
1028 # filesystem doesn't support them
1033 # filesystem doesn't support them
1029
1034
1030 copiesget = self._repo.dirstate.copies().get
1035 copiesget = self._repo.dirstate.copies().get
1031
1036
1032 if len(self._parents) < 2:
1037 if len(self._parents) < 2:
1033 # when we have one parent, it's easy: copy from parent
1038 # when we have one parent, it's easy: copy from parent
1034 man = self._parents[0].manifest()
1039 man = self._parents[0].manifest()
1035 def func(f):
1040 def func(f):
1036 f = copiesget(f, f)
1041 f = copiesget(f, f)
1037 return man.flags(f)
1042 return man.flags(f)
1038 else:
1043 else:
1039 # merges are tricky: we try to reconstruct the unstored
1044 # merges are tricky: we try to reconstruct the unstored
1040 # result from the merge (issue1802)
1045 # result from the merge (issue1802)
1041 p1, p2 = self._parents
1046 p1, p2 = self._parents
1042 pa = p1.ancestor(p2)
1047 pa = p1.ancestor(p2)
1043 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1048 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1044
1049
1045 def func(f):
1050 def func(f):
1046 f = copiesget(f, f) # may be wrong for merges with copies
1051 f = copiesget(f, f) # may be wrong for merges with copies
1047 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1052 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1048 if fl1 == fl2:
1053 if fl1 == fl2:
1049 return fl1
1054 return fl1
1050 if fl1 == fla:
1055 if fl1 == fla:
1051 return fl2
1056 return fl2
1052 if fl2 == fla:
1057 if fl2 == fla:
1053 return fl1
1058 return fl1
1054 return '' # punt for conflicts
1059 return '' # punt for conflicts
1055
1060
1056 return func
1061 return func
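# In short: relative to the ancestor, the side that changed a flag wins;
# if both sides changed it to different values, fall back to '' above.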
1057
1062
1058 @propertycache
1063 @propertycache
1059 def _flagfunc(self):
1064 def _flagfunc(self):
1060 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1065 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1061
1066
1062 @propertycache
1067 @propertycache
1063 def _manifest(self):
1068 def _manifest(self):
1064 """generate a manifest corresponding to the values in self._status"""
1069 """generate a manifest corresponding to the values in self._status"""
1065
1070
1066 man1 = self._parents[0].manifest()
1071 man1 = self._parents[0].manifest()
1067 man = man1.copy()
1072 man = man1.copy()
1068 if len(self._parents) > 1:
1073 if len(self._parents) > 1:
1069 man2 = self.p2().manifest()
1074 man2 = self.p2().manifest()
1070 def getman(f):
1075 def getman(f):
1071 if f in man1:
1076 if f in man1:
1072 return man1
1077 return man1
1073 return man2
1078 return man2
1074 else:
1079 else:
1075 getman = lambda f: man1
1080 getman = lambda f: man1
1076
1081
1077 copied = self._repo.dirstate.copies()
1082 copied = self._repo.dirstate.copies()
1078 ff = self._flagfunc
1083 ff = self._flagfunc
1079 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1084 for i, l in (("a", self._status.added), ("m", self._status.modified)):
1080 for f in l:
1085 for f in l:
1081 orig = copied.get(f, f)
1086 orig = copied.get(f, f)
1082 man[f] = getman(orig).get(orig, nullid) + i
1087 man[f] = getman(orig).get(orig, nullid) + i
1083 try:
1088 try:
1084 man.setflag(f, ff(f))
1089 man.setflag(f, ff(f))
1085 except OSError:
1090 except OSError:
1086 pass
1091 pass
1087
1092
1088 for f in self._status.deleted + self._status.removed:
1093 for f in self._status.deleted + self._status.removed:
1089 if f in man:
1094 if f in man:
1090 del man[f]
1095 del man[f]
1091
1096
1092 return man
1097 return man
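# Note: appending 'a' or 'm' above yields a 21-byte node (20-byte hash plus
# one status character), the manifest convention for entries that are dirty
# in the working copy -- the same idea behind the _newnode sentinel.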
1093
1098
1094 @propertycache
1099 @propertycache
1095 def _status(self):
1100 def _status(self):
1096 return self._repo.status()
1101 return self._repo.status()
1097
1102
1098 @propertycache
1103 @propertycache
1099 def _user(self):
1104 def _user(self):
1100 return self._repo.ui.username()
1105 return self._repo.ui.username()
1101
1106
1102 @propertycache
1107 @propertycache
1103 def _date(self):
1108 def _date(self):
1104 return util.makedate()
1109 return util.makedate()
1105
1110
1106 def subrev(self, subpath):
1111 def subrev(self, subpath):
1107 return None
1112 return None
1108
1113
1109 def user(self):
1114 def user(self):
1110 return self._user or self._repo.ui.username()
1115 return self._user or self._repo.ui.username()
1111 def date(self):
1116 def date(self):
1112 return self._date
1117 return self._date
1113 def description(self):
1118 def description(self):
1114 return self._text
1119 return self._text
1115 def files(self):
1120 def files(self):
1116 return sorted(self._status.modified + self._status.added +
1121 return sorted(self._status.modified + self._status.added +
1117 self._status.removed)
1122 self._status.removed)
1118
1123
1119 def modified(self):
1124 def modified(self):
1120 return self._status.modified
1125 return self._status.modified
1121 def added(self):
1126 def added(self):
1122 return self._status.added
1127 return self._status.added
1123 def removed(self):
1128 def removed(self):
1124 return self._status.removed
1129 return self._status.removed
1125 def deleted(self):
1130 def deleted(self):
1126 return self._status.deleted
1131 return self._status.deleted
1127 def unknown(self):
1132 def unknown(self):
1128 return self._status.unknown
1133 return self._status.unknown
1129 def ignored(self):
1134 def ignored(self):
1130 return self._status.ignored
1135 return self._status.ignored
1131 def clean(self):
1136 def clean(self):
1132 return self._status.clean
1137 return self._status.clean
1133 def branch(self):
1138 def branch(self):
1134 return encoding.tolocal(self._extra['branch'])
1139 return encoding.tolocal(self._extra['branch'])
1135 def closesbranch(self):
1140 def closesbranch(self):
1136 return 'close' in self._extra
1141 return 'close' in self._extra
1137 def extra(self):
1142 def extra(self):
1138 return self._extra
1143 return self._extra
1139
1144
1140 def tags(self):
1145 def tags(self):
1141 t = []
1146 t = []
1142 for p in self.parents():
1147 for p in self.parents():
1143 t.extend(p.tags())
1148 t.extend(p.tags())
1144 return t
1149 return t
1145
1150
1146 def bookmarks(self):
1151 def bookmarks(self):
1147 b = []
1152 b = []
1148 for p in self.parents():
1153 for p in self.parents():
1149 b.extend(p.bookmarks())
1154 b.extend(p.bookmarks())
1150 return b
1155 return b
1151
1156
1152 def phase(self):
1157 def phase(self):
1153 phase = phases.draft # default phase to draft
1158 phase = phases.draft # default phase to draft
1154 for p in self.parents():
1159 for p in self.parents():
1155 phase = max(phase, p.phase())
1160 phase = max(phase, p.phase())
1156 return phase
1161 return phase
1157
1162
1158 def hidden(self):
1163 def hidden(self):
1159 return False
1164 return False
1160
1165
1161 def children(self):
1166 def children(self):
1162 return []
1167 return []
1163
1168
1164 def flags(self, path):
1169 def flags(self, path):
1165 if '_manifest' in self.__dict__:
1170 if '_manifest' in self.__dict__:
1166 try:
1171 try:
1167 return self._manifest.flags(path)
1172 return self._manifest.flags(path)
1168 except KeyError:
1173 except KeyError:
1169 return ''
1174 return ''
1170
1175
1171 try:
1176 try:
1172 return self._flagfunc(path)
1177 return self._flagfunc(path)
1173 except OSError:
1178 except OSError:
1174 return ''
1179 return ''
1175
1180
1176 def ancestor(self, c2):
1181 def ancestor(self, c2):
1177 """return the "best" ancestor context of self and c2"""
1182 """return the "best" ancestor context of self and c2"""
1178 return self._parents[0].ancestor(c2) # punt on two parents for now
1183 return self._parents[0].ancestor(c2) # punt on two parents for now
1179
1184
1180 def walk(self, match):
1185 def walk(self, match):
1181 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1186 return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
1182 True, False))
1187 True, False))
1183
1188
1184 def matches(self, match):
1189 def matches(self, match):
1185 return sorted(self._repo.dirstate.matches(match))
1190 return sorted(self._repo.dirstate.matches(match))
1186
1191
1187 def ancestors(self):
1192 def ancestors(self):
1188 for a in self._repo.changelog.ancestors(
1193 for a in self._repo.changelog.ancestors(
1189 [p.rev() for p in self._parents]):
1194 [p.rev() for p in self._parents]):
1190 yield changectx(self._repo, a)
1195 yield changectx(self._repo, a)
1191
1196
1192 def markcommitted(self, node):
1197 def markcommitted(self, node):
1193 """Perform post-commit cleanup necessary after committing this ctx
1198 """Perform post-commit cleanup necessary after committing this ctx
1194
1199
1195 Specifically, this updates backing stores this working context
1200 Specifically, this updates backing stores this working context
1196 wraps to reflect the fact that the changes reflected by this
1201 wraps to reflect the fact that the changes reflected by this
1197 workingctx have been committed. For example, it marks
1202 workingctx have been committed. For example, it marks
1198 modified and added files as normal in the dirstate.
1203 modified and added files as normal in the dirstate.
1199
1204
1200 """
1205 """
1201
1206
1202 self._repo.dirstate.beginparentchange()
1207 self._repo.dirstate.beginparentchange()
1203 for f in self.modified() + self.added():
1208 for f in self.modified() + self.added():
1204 self._repo.dirstate.normal(f)
1209 self._repo.dirstate.normal(f)
1205 for f in self.removed():
1210 for f in self.removed():
1206 self._repo.dirstate.drop(f)
1211 self._repo.dirstate.drop(f)
1207 self._repo.dirstate.setparents(node)
1212 self._repo.dirstate.setparents(node)
1208 self._repo.dirstate.endparentchange()
1213 self._repo.dirstate.endparentchange()
1209
1214
1210 def dirs(self):
1215 def dirs(self):
1211 return self._repo.dirstate.dirs()
1216 return self._repo.dirstate.dirs()
1212
1217
1213 class workingctx(committablectx):
1218 class workingctx(committablectx):
1214 """A workingctx object makes access to data related to
1219 """A workingctx object makes access to data related to
1215 the current working directory convenient.
1220 the current working directory convenient.
1216 date - any valid date string or (unixtime, offset), or None.
1221 date - any valid date string or (unixtime, offset), or None.
1217 user - username string, or None.
1222 user - username string, or None.
1218 extra - a dictionary of extra values, or None.
1223 extra - a dictionary of extra values, or None.
1219 changes - a list of file lists as returned by localrepo.status()
1224 changes - a list of file lists as returned by localrepo.status()
1220 or None to use the repository status.
1225 or None to use the repository status.
1221 """
1226 """
1222 def __init__(self, repo, text="", user=None, date=None, extra=None,
1227 def __init__(self, repo, text="", user=None, date=None, extra=None,
1223 changes=None):
1228 changes=None):
1224 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1229 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1225
1230
1226 def __iter__(self):
1231 def __iter__(self):
1227 d = self._repo.dirstate
1232 d = self._repo.dirstate
1228 for f in d:
1233 for f in d:
1229 if d[f] != 'r':
1234 if d[f] != 'r':
1230 yield f
1235 yield f
1231
1236
1232 def __contains__(self, key):
1237 def __contains__(self, key):
1233 return self._repo.dirstate[key] not in "?r"
1238 return self._repo.dirstate[key] not in "?r"
1234
1239
1235 @propertycache
1240 @propertycache
1236 def _parents(self):
1241 def _parents(self):
1237 p = self._repo.dirstate.parents()
1242 p = self._repo.dirstate.parents()
1238 if p[1] == nullid:
1243 if p[1] == nullid:
1239 p = p[:-1]
1244 p = p[:-1]
1240 return [changectx(self._repo, x) for x in p]
1245 return [changectx(self._repo, x) for x in p]
1241
1246
1242 def filectx(self, path, filelog=None):
1247 def filectx(self, path, filelog=None):
1243 """get a file context from the working directory"""
1248 """get a file context from the working directory"""
1244 return workingfilectx(self._repo, path, workingctx=self,
1249 return workingfilectx(self._repo, path, workingctx=self,
1245 filelog=filelog)
1250 filelog=filelog)
1246
1251
1247 def dirty(self, missing=False, merge=True, branch=True):
1252 def dirty(self, missing=False, merge=True, branch=True):
1248 "check whether a working directory is modified"
1253 "check whether a working directory is modified"
1249 # check subrepos first
1254 # check subrepos first
1250 for s in sorted(self.substate):
1255 for s in sorted(self.substate):
1251 if self.sub(s).dirty():
1256 if self.sub(s).dirty():
1252 return True
1257 return True
1253 # check current working dir
1258 # check current working dir
1254 return ((merge and self.p2()) or
1259 return ((merge and self.p2()) or
1255 (branch and self.branch() != self.p1().branch()) or
1260 (branch and self.branch() != self.p1().branch()) or
1256 self.modified() or self.added() or self.removed() or
1261 self.modified() or self.added() or self.removed() or
1257 (missing and self.deleted()))
1262 (missing and self.deleted()))
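# A short usage sketch (assuming 'repo' is a localrepo; repo[None] is the
# usual way to obtain a workingctx):
#
#   if repo[None].dirty(missing=True):
#       pass  # uncommitted changes, counting files missing from disk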
1258
1263
1259 def add(self, list, prefix=""):
1264 def add(self, list, prefix=""):
1260 join = lambda f: os.path.join(prefix, f)
1265 join = lambda f: os.path.join(prefix, f)
1261 wlock = self._repo.wlock()
1266 wlock = self._repo.wlock()
1262 ui, ds = self._repo.ui, self._repo.dirstate
1267 ui, ds = self._repo.ui, self._repo.dirstate
1263 try:
1268 try:
1264 rejected = []
1269 rejected = []
1265 lstat = self._repo.wvfs.lstat
1270 lstat = self._repo.wvfs.lstat
1266 for f in list:
1271 for f in list:
1267 scmutil.checkportable(ui, join(f))
1272 scmutil.checkportable(ui, join(f))
1268 try:
1273 try:
1269 st = lstat(f)
1274 st = lstat(f)
1270 except OSError:
1275 except OSError:
1271 ui.warn(_("%s does not exist!\n") % join(f))
1276 ui.warn(_("%s does not exist!\n") % join(f))
1272 rejected.append(f)
1277 rejected.append(f)
1273 continue
1278 continue
1274 if st.st_size > 10000000:
1279 if st.st_size > 10000000:
1275 ui.warn(_("%s: up to %d MB of RAM may be required "
1280 ui.warn(_("%s: up to %d MB of RAM may be required "
1276 "to manage this file\n"
1281 "to manage this file\n"
1277 "(use 'hg revert %s' to cancel the "
1282 "(use 'hg revert %s' to cancel the "
1278 "pending addition)\n")
1283 "pending addition)\n")
1279 % (f, 3 * st.st_size // 1000000, join(f)))
1284 % (f, 3 * st.st_size // 1000000, join(f)))
1280 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1285 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1281 ui.warn(_("%s not added: only files and symlinks "
1286 ui.warn(_("%s not added: only files and symlinks "
1282 "supported currently\n") % join(f))
1287 "supported currently\n") % join(f))
1283 rejected.append(f)
1288 rejected.append(f)
1284 elif ds[f] in 'amn':
1289 elif ds[f] in 'amn':
1285 ui.warn(_("%s already tracked!\n") % join(f))
1290 ui.warn(_("%s already tracked!\n") % join(f))
1286 elif ds[f] == 'r':
1291 elif ds[f] == 'r':
1287 ds.normallookup(f)
1292 ds.normallookup(f)
1288 else:
1293 else:
1289 ds.add(f)
1294 ds.add(f)
1290 return rejected
1295 return rejected
1291 finally:
1296 finally:
1292 wlock.release()
1297 wlock.release()
1293
1298
1294 def forget(self, files, prefix=""):
1299 def forget(self, files, prefix=""):
1295 join = lambda f: os.path.join(prefix, f)
1300 join = lambda f: os.path.join(prefix, f)
1296 wlock = self._repo.wlock()
1301 wlock = self._repo.wlock()
1297 try:
1302 try:
1298 rejected = []
1303 rejected = []
1299 for f in files:
1304 for f in files:
1300 if f not in self._repo.dirstate:
1305 if f not in self._repo.dirstate:
1301 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1306 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1302 rejected.append(f)
1307 rejected.append(f)
1303 elif self._repo.dirstate[f] != 'a':
1308 elif self._repo.dirstate[f] != 'a':
1304 self._repo.dirstate.remove(f)
1309 self._repo.dirstate.remove(f)
1305 else:
1310 else:
1306 self._repo.dirstate.drop(f)
1311 self._repo.dirstate.drop(f)
1307 return rejected
1312 return rejected
1308 finally:
1313 finally:
1309 wlock.release()
1314 wlock.release()
1310
1315
1311 def undelete(self, list):
1316 def undelete(self, list):
1312 pctxs = self.parents()
1317 pctxs = self.parents()
1313 wlock = self._repo.wlock()
1318 wlock = self._repo.wlock()
1314 try:
1319 try:
1315 for f in list:
1320 for f in list:
1316 if self._repo.dirstate[f] != 'r':
1321 if self._repo.dirstate[f] != 'r':
1317 self._repo.ui.warn(_("%s not removed!\n") % f)
1322 self._repo.ui.warn(_("%s not removed!\n") % f)
1318 else:
1323 else:
1319 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1324 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1320 t = fctx.data()
1325 t = fctx.data()
1321 self._repo.wwrite(f, t, fctx.flags())
1326 self._repo.wwrite(f, t, fctx.flags())
1322 self._repo.dirstate.normal(f)
1327 self._repo.dirstate.normal(f)
1323 finally:
1328 finally:
1324 wlock.release()
1329 wlock.release()
1325
1330
1326 def copy(self, source, dest):
1331 def copy(self, source, dest):
1327 try:
1332 try:
1328 st = self._repo.wvfs.lstat(dest)
1333 st = self._repo.wvfs.lstat(dest)
1329 except OSError, err:
1334 except OSError, err:
1330 if err.errno != errno.ENOENT:
1335 if err.errno != errno.ENOENT:
1331 raise
1336 raise
1332 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1337 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1333 return
1338 return
1334 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1339 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1335 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1340 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1336 "symbolic link\n") % dest)
1341 "symbolic link\n") % dest)
1337 else:
1342 else:
1338 wlock = self._repo.wlock()
1343 wlock = self._repo.wlock()
1339 try:
1344 try:
1340 if self._repo.dirstate[dest] in '?':
1345 if self._repo.dirstate[dest] in '?':
1341 self._repo.dirstate.add(dest)
1346 self._repo.dirstate.add(dest)
1342 elif self._repo.dirstate[dest] in 'r':
1347 elif self._repo.dirstate[dest] in 'r':
1343 self._repo.dirstate.normallookup(dest)
1348 self._repo.dirstate.normallookup(dest)
1344 self._repo.dirstate.copy(source, dest)
1349 self._repo.dirstate.copy(source, dest)
1345 finally:
1350 finally:
1346 wlock.release()
1351 wlock.release()
1347
1352
1348 def _filtersuspectsymlink(self, files):
1353 def _filtersuspectsymlink(self, files):
1349 if not files or self._repo.dirstate._checklink:
1354 if not files or self._repo.dirstate._checklink:
1350 return files
1355 return files
1351
1356
1352 # Symlink placeholders may get non-symlink-like contents
1357 # Symlink placeholders may get non-symlink-like contents
1353 # via user error or dereferencing by NFS or Samba servers,
1358 # via user error or dereferencing by NFS or Samba servers,
1354 # so we filter out any placeholders that don't look like a
1359 # so we filter out any placeholders that don't look like a
1355 # symlink
1360 # symlink
1356 sane = []
1361 sane = []
1357 for f in files:
1362 for f in files:
1358 if self.flags(f) == 'l':
1363 if self.flags(f) == 'l':
1359 d = self[f].data()
1364 d = self[f].data()
1360 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1365 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1361 self._repo.ui.debug('ignoring suspect symlink placeholder'
1366 self._repo.ui.debug('ignoring suspect symlink placeholder'
1362 ' "%s"\n' % f)
1367 ' "%s"\n' % f)
1363 continue
1368 continue
1364 sane.append(f)
1369 sane.append(f)
1365 return sane
1370 return sane
1366
1371
1367 def _checklookup(self, files):
1372 def _checklookup(self, files):
1368 # check for any possibly clean files
1373 # check for any possibly clean files
1369 if not files:
1374 if not files:
1370 return [], []
1375 return [], []
1371
1376
1372 modified = []
1377 modified = []
1373 fixup = []
1378 fixup = []
1374 pctx = self._parents[0]
1379 pctx = self._parents[0]
1375 # do a full compare of any files that might have changed
1380 # do a full compare of any files that might have changed
1376 for f in sorted(files):
1381 for f in sorted(files):
1377 if (f not in pctx or self.flags(f) != pctx.flags(f)
1382 if (f not in pctx or self.flags(f) != pctx.flags(f)
1378 or pctx[f].cmp(self[f])):
1383 or pctx[f].cmp(self[f])):
1379 modified.append(f)
1384 modified.append(f)
1380 else:
1385 else:
1381 fixup.append(f)
1386 fixup.append(f)
1382
1387
1383 # update dirstate for files that are actually clean
1388 # update dirstate for files that are actually clean
1384 if fixup:
1389 if fixup:
1385 try:
1390 try:
1386 # updating the dirstate is optional
1391 # updating the dirstate is optional
1387 # so we don't wait on the lock
1392 # so we don't wait on the lock
1388 # wlock can invalidate the dirstate, so cache normal _after_
1393 # wlock can invalidate the dirstate, so cache normal _after_
1389 # taking the lock
1394 # taking the lock
1390 wlock = self._repo.wlock(False)
1395 wlock = self._repo.wlock(False)
1391 normal = self._repo.dirstate.normal
1396 normal = self._repo.dirstate.normal
1392 try:
1397 try:
1393 for f in fixup:
1398 for f in fixup:
1394 normal(f)
1399 normal(f)
1395 finally:
1400 finally:
1396 wlock.release()
1401 wlock.release()
1397 except error.LockError:
1402 except error.LockError:
1398 pass
1403 pass
1399 return modified, fixup
1404 return modified, fixup
1400
1405
1401 def _manifestmatches(self, match, s):
1406 def _manifestmatches(self, match, s):
1402 """Slow path for workingctx
1407 """Slow path for workingctx
1403
1408
1404 The fast path is when we compare the working directory to its parent;
1409 The fast path is when we compare the working directory to its parent;
1405 this slow path handles comparison with a non-parent, therefore we
1410 this slow path handles comparison with a non-parent, therefore we
1406 need to build a manifest and return what matches.
1411 need to build a manifest and return what matches.
1407 """
1412 """
1408 mf = self._repo['.']._manifestmatches(match, s)
1413 mf = self._repo['.']._manifestmatches(match, s)
1409 modified, added, removed = s[0:3]
1414 modified, added, removed = s[0:3]
1410 for f in modified + added:
1415 for f in modified + added:
1411 mf[f] = None
1416 mf[f] = _newnode
1412 mf.setflag(f, self.flags(f))
1417 mf.setflag(f, self.flags(f))
1413 for f in removed:
1418 for f in removed:
1414 if f in mf:
1419 if f in mf:
1415 del mf[f]
1420 del mf[f]
1416 return mf
1421 return mf
1417
1422
1418 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1423 def _prestatus(self, other, s, match, listignored, listclean, listunknown):
1419 """override the parent hook with a dirstate query
1424 """override the parent hook with a dirstate query
1420
1425
1421 We use this prestatus hook to populate the status with information from
1426 We use this prestatus hook to populate the status with information from
1422 the dirstate.
1427 the dirstate.
1423 """
1428 """
1424 # doesn't need to call super; if that changes, be aware that super
1429 # doesn't need to call super; if that changes, be aware that super
1425 # calls self.manifest which would slow down the common case of calling
1430 # calls self.manifest which would slow down the common case of calling
1426 # status against a workingctx's parent
1431 # status against a workingctx's parent
1427 return self._dirstatestatus(match, listignored, listclean, listunknown)
1432 return self._dirstatestatus(match, listignored, listclean, listunknown)
1428
1433
1429 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1434 def _poststatus(self, other, s, match, listignored, listclean, listunknown):
1430 """override the parent hook with a filter for suspect symlinks
1435 """override the parent hook with a filter for suspect symlinks
1431
1436
1432 We use this poststatus hook to filter out symlinks that might have
1437 We use this poststatus hook to filter out symlinks that might have
1433 accidentally ended up with the entire contents of the file they are
1438 accidentally ended up with the entire contents of the file they are
1434 supposed to be linking to.
1439 supposed to be linking to.
1435 """
1440 """
1436 s[0] = self._filtersuspectsymlink(s[0])
1441 s[0] = self._filtersuspectsymlink(s[0])
1437 self._status = scmutil.status(*s)
1442 self._status = scmutil.status(*s)
1438 return s
1443 return s
1439
1444
1440 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1445 def _dirstatestatus(self, match=None, ignored=False, clean=False,
1441 unknown=False):
1446 unknown=False):
1442 '''Gets the status from the dirstate -- internal use only.'''
1447 '''Gets the status from the dirstate -- internal use only.'''
1443 listignored, listclean, listunknown = ignored, clean, unknown
1448 listignored, listclean, listunknown = ignored, clean, unknown
1444 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1449 match = match or matchmod.always(self._repo.root, self._repo.getcwd())
1445 subrepos = []
1450 subrepos = []
1446 if '.hgsub' in self:
1451 if '.hgsub' in self:
1447 subrepos = sorted(self.substate)
1452 subrepos = sorted(self.substate)
1448 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1453 cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
1449 listclean, listunknown)
1454 listclean, listunknown)
1450 modified, added, removed, deleted, unknown, ignored, clean = s
1455 modified, added, removed, deleted, unknown, ignored, clean = s
1451
1456
1452 # check for any possibly clean files
1457 # check for any possibly clean files
1453 if cmp:
1458 if cmp:
1454 modified2, fixup = self._checklookup(cmp)
1459 modified2, fixup = self._checklookup(cmp)
1455 modified += modified2
1460 modified += modified2
1456
1461
1457 # update dirstate for files that are actually clean
1462 # update dirstate for files that are actually clean
1458 if fixup and listclean:
1463 if fixup and listclean:
1459 clean += fixup
1464 clean += fixup
1460
1465
1461 return [modified, added, removed, deleted, unknown, ignored, clean]
1466 return [modified, added, removed, deleted, unknown, ignored, clean]
1462
1467
1463 def _buildstatus(self, other, s, match, listignored, listclean,
1468 def _buildstatus(self, other, s, match, listignored, listclean,
1464 listunknown):
1469 listunknown):
1465 """build a status with respect to another context
1470 """build a status with respect to another context
1466
1471
1467 This includes logic for maintaining the fast path of status when
1472 This includes logic for maintaining the fast path of status when
1468 comparing the working directory against its parent, which is to skip
1473 comparing the working directory against its parent, which is to skip
1469 building a new manifest when self (the working directory) is comparing
1474 building a new manifest when self (the working directory) is comparing
1470 against its parent (repo['.']).
1475 against its parent (repo['.']).
1471 """
1476 """
1472 if other != self._repo['.']:
1477 if other != self._repo['.']:
1473 s = super(workingctx, self)._buildstatus(other, s, match,
1478 s = super(workingctx, self)._buildstatus(other, s, match,
1474 listignored, listclean,
1479 listignored, listclean,
1475 listunknown)
1480 listunknown)
1476 return s
1481 return s
1477
1482
1478 def _matchstatus(self, other, s, match, listignored, listclean,
1483 def _matchstatus(self, other, s, match, listignored, listclean,
1479 listunknown):
1484 listunknown):
1480 """override the match method with a filter for directory patterns
1485 """override the match method with a filter for directory patterns
1481
1486
1482 We use inheritance to customize the match.bad method only in cases of
1487 We use inheritance to customize the match.bad method only in cases of
1483 workingctx since it belongs only to the working directory when
1488 workingctx since it belongs only to the working directory when
1484 comparing against the parent changeset.
1489 comparing against the parent changeset.
1485
1490
1486 If we aren't comparing against the working directory's parent, then we
1491 If we aren't comparing against the working directory's parent, then we
1487 just use the default match object sent to us.
1492 just use the default match object sent to us.
1488 """
1493 """
1489 superself = super(workingctx, self)
1494 superself = super(workingctx, self)
1490 match = superself._matchstatus(other, s, match, listignored, listclean,
1495 match = superself._matchstatus(other, s, match, listignored, listclean,
1491 listunknown)
1496 listunknown)
1492 if other != self._repo['.']:
1497 if other != self._repo['.']:
1493 def bad(f, msg):
1498 def bad(f, msg):
1494 # 'f' may be a directory pattern from 'match.files()',
1499 # 'f' may be a directory pattern from 'match.files()',
1495 # so 'f not in ctx1' is not enough
1500 # so 'f not in ctx1' is not enough
1496 if f not in other and f not in other.dirs():
1501 if f not in other and f not in other.dirs():
1497 self._repo.ui.warn('%s: %s\n' %
1502 self._repo.ui.warn('%s: %s\n' %
1498 (self._repo.dirstate.pathto(f), msg))
1503 (self._repo.dirstate.pathto(f), msg))
1499 match.bad = bad
1504 match.bad = bad
1500 return match
1505 return match
1501
1506
1502 def status(self, other='.', match=None, listignored=False,
1507 def status(self, other='.', match=None, listignored=False,
1503 listclean=False, listunknown=False, listsubrepos=False):
1508 listclean=False, listunknown=False, listsubrepos=False):
1504 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1509 # yet to be determined: what to do if 'other' is a 'workingctx' or a
1505 # 'memctx'?
1510 # 'memctx'?
1506 return super(workingctx, self).status(other, match, listignored,
1511 return super(workingctx, self).status(other, match, listignored,
1507 listclean, listunknown,
1512 listclean, listunknown,
1508 listsubrepos)
1513 listsubrepos)
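# A short usage sketch (assuming 'repo' is a localrepo); the result should
# expose the same lists as the accessors above:
#
#   st = repo[None].status(listunknown=True)
#   # st.modified, st.added, st.removed, st.deleted, st.unknown, ...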
1509
1514
1510 class committablefilectx(basefilectx):
1515 class committablefilectx(basefilectx):
1511 """A committablefilectx provides common functionality for a file context
1516 """A committablefilectx provides common functionality for a file context
1512 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1517 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1513 def __init__(self, repo, path, filelog=None, ctx=None):
1518 def __init__(self, repo, path, filelog=None, ctx=None):
1514 self._repo = repo
1519 self._repo = repo
1515 self._path = path
1520 self._path = path
1516 self._changeid = None
1521 self._changeid = None
1517 self._filerev = self._filenode = None
1522 self._filerev = self._filenode = None
1518
1523
1519 if filelog is not None:
1524 if filelog is not None:
1520 self._filelog = filelog
1525 self._filelog = filelog
1521 if ctx:
1526 if ctx:
1522 self._changectx = ctx
1527 self._changectx = ctx
1523
1528
1524 def __nonzero__(self):
1529 def __nonzero__(self):
1525 return True
1530 return True
1526
1531
1527 def parents(self):
1532 def parents(self):
1528 '''return parent filectxs, following copies if necessary'''
1533 '''return parent filectxs, following copies if necessary'''
1529 def filenode(ctx, path):
1534 def filenode(ctx, path):
1530 return ctx._manifest.get(path, nullid)
1535 return ctx._manifest.get(path, nullid)
1531
1536
1532 path = self._path
1537 path = self._path
1533 fl = self._filelog
1538 fl = self._filelog
1534 pcl = self._changectx._parents
1539 pcl = self._changectx._parents
1535 renamed = self.renamed()
1540 renamed = self.renamed()
1536
1541
1537 if renamed:
1542 if renamed:
1538 pl = [renamed + (None,)]
1543 pl = [renamed + (None,)]
1539 else:
1544 else:
1540 pl = [(path, filenode(pcl[0], path), fl)]
1545 pl = [(path, filenode(pcl[0], path), fl)]
1541
1546
1542 for pc in pcl[1:]:
1547 for pc in pcl[1:]:
1543 pl.append((path, filenode(pc, path), fl))
1548 pl.append((path, filenode(pc, path), fl))
1544
1549
1545 return [filectx(self._repo, p, fileid=n, filelog=l)
1550 return [filectx(self._repo, p, fileid=n, filelog=l)
1546 for p, n, l in pl if n != nullid]
1551 for p, n, l in pl if n != nullid]
1547
1552
1548 def children(self):
1553 def children(self):
1549 return []
1554 return []
1550
1555
1551 class workingfilectx(committablefilectx):
1556 class workingfilectx(committablefilectx):
1552 """A workingfilectx object makes access to data related to a particular
1557 """A workingfilectx object makes access to data related to a particular
1553 file in the working directory convenient."""
1558 file in the working directory convenient."""
1554 def __init__(self, repo, path, filelog=None, workingctx=None):
1559 def __init__(self, repo, path, filelog=None, workingctx=None):
1555 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1560 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1556
1561
1557 @propertycache
1562 @propertycache
1558 def _changectx(self):
1563 def _changectx(self):
1559 return workingctx(self._repo)
1564 return workingctx(self._repo)
1560
1565
1561 def data(self):
1566 def data(self):
1562 return self._repo.wread(self._path)
1567 return self._repo.wread(self._path)
1563 def renamed(self):
1568 def renamed(self):
1564 rp = self._repo.dirstate.copied(self._path)
1569 rp = self._repo.dirstate.copied(self._path)
1565 if not rp:
1570 if not rp:
1566 return None
1571 return None
1567 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1572 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1568
1573
1569 def size(self):
1574 def size(self):
1570 return self._repo.wvfs.lstat(self._path).st_size
1575 return self._repo.wvfs.lstat(self._path).st_size
1571 def date(self):
1576 def date(self):
1572 t, tz = self._changectx.date()
1577 t, tz = self._changectx.date()
1573 try:
1578 try:
1574 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1579 return (int(self._repo.wvfs.lstat(self._path).st_mtime), tz)
1575 except OSError, err:
1580 except OSError, err:
1576 if err.errno != errno.ENOENT:
1581 if err.errno != errno.ENOENT:
1577 raise
1582 raise
1578 return (t, tz)
1583 return (t, tz)
1579
1584
1580 def cmp(self, fctx):
1585 def cmp(self, fctx):
1581 """compare with other file context
1586 """compare with other file context
1582
1587
1583 returns True if different than fctx.
1588 returns True if different than fctx.
1584 """
1589 """
1585 # fctx should be a filectx (not a workingfilectx)
1590 # fctx should be a filectx (not a workingfilectx)
1586 # invert comparison to reuse the same code path
1591 # invert comparison to reuse the same code path
1587 return fctx.cmp(self)
1592 return fctx.cmp(self)
1588
1593
1589 def remove(self, ignoremissing=False):
1594 def remove(self, ignoremissing=False):
1590 """wraps unlink for a repo's working directory"""
1595 """wraps unlink for a repo's working directory"""
1591 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1596 util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)
1592
1597
1593 def write(self, data, flags):
1598 def write(self, data, flags):
1594 """wraps repo.wwrite"""
1599 """wraps repo.wwrite"""
1595 self._repo.wwrite(self._path, data, flags)
1600 self._repo.wwrite(self._path, data, flags)
1596
1601
1597 class memctx(committablectx):
1602 class memctx(committablectx):
1598 """Use memctx to perform in-memory commits via localrepo.commitctx().
1603 """Use memctx to perform in-memory commits via localrepo.commitctx().
1599
1604
1600 Revision information is supplied at initialization time while
1605 Revision information is supplied at initialization time while
1601 related file data is made available through a callback
1606 related file data is made available through a callback
1602 mechanism. 'repo' is the current localrepo, 'parents' is a
1607 mechanism. 'repo' is the current localrepo, 'parents' is a
1603 sequence of two parent revisions identifiers (pass None for every
1608 sequence of two parent revisions identifiers (pass None for every
1604 missing parent), 'text' is the commit message and 'files' lists
1609 missing parent), 'text' is the commit message and 'files' lists
1605 names of files touched by the revision (normalized and relative to
1610 names of files touched by the revision (normalized and relative to
1606 repository root).
1611 repository root).
1607
1612
1608 filectxfn(repo, memctx, path) is a callable receiving the
1613 filectxfn(repo, memctx, path) is a callable receiving the
1609 repository, the current memctx object and the normalized path of
1614 repository, the current memctx object and the normalized path of
1610 requested file, relative to repository root. It is fired by the
1615 requested file, relative to repository root. It is fired by the
1611 commit function for every file in 'files', but the call order is
1616 commit function for every file in 'files', but the call order is
1612 undefined. If the file is available in the revision being
1617 undefined. If the file is available in the revision being
1613 committed (updated or added), filectxfn returns a memfilectx
1618 committed (updated or added), filectxfn returns a memfilectx
1614 object. If the file was removed, filectxfn raises an
1619 object. If the file was removed, filectxfn raises an
1615 IOError. Moved files are represented by marking the source file
1620 IOError. Moved files are represented by marking the source file
1616 removed and the new file added with copy information (see
1621 removed and the new file added with copy information (see
1617 memfilectx).
1622 memfilectx).
1618
1623
1619 user receives the committer name and defaults to current
1624 user receives the committer name and defaults to current
1620 repository username, date is the commit date in any format
1625 repository username, date is the commit date in any format
1621 supported by util.parsedate() and defaults to current date, extra
1626 supported by util.parsedate() and defaults to current date, extra
1622 is a dictionary of metadata or is left empty.
1627 is a dictionary of metadata or is left empty.
1623 """
1628 """
1624
1629
1625 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1630 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
1626 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1631 # Extensions that need to retain compatibility across Mercurial 3.1 can use
1627 # this field to determine what to do in filectxfn.
1632 # this field to determine what to do in filectxfn.
1628 _returnnoneformissingfiles = True
1633 _returnnoneformissingfiles = True
1629
1634
1630 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1635 def __init__(self, repo, parents, text, files, filectxfn, user=None,
1631 date=None, extra=None, editor=False):
1636 date=None, extra=None, editor=False):
1632 super(memctx, self).__init__(repo, text, user, date, extra)
1637 super(memctx, self).__init__(repo, text, user, date, extra)
1633 self._rev = None
1638 self._rev = None
1634 self._node = None
1639 self._node = None
1635 parents = [(p or nullid) for p in parents]
1640 parents = [(p or nullid) for p in parents]
1636 p1, p2 = parents
1641 p1, p2 = parents
1637 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1642 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
1638 files = sorted(set(files))
1643 files = sorted(set(files))
1639 self._status = scmutil.status(files, [], [], [], [], [], [])
1644 self._status = scmutil.status(files, [], [], [], [], [], [])
1640 self._filectxfn = filectxfn
1645 self._filectxfn = filectxfn
1641 self.substate = {}
1646 self.substate = {}
1642
1647
1643 # if store is not callable, wrap it in a function
1648 # if store is not callable, wrap it in a function
1644 if not callable(filectxfn):
1649 if not callable(filectxfn):
1645 def getfilectx(repo, memctx, path):
1650 def getfilectx(repo, memctx, path):
1646 fctx = filectxfn[path]
1651 fctx = filectxfn[path]
1647 # this is weird but apparently we only keep track of one parent
1652 # this is weird but apparently we only keep track of one parent
1648 # (why not only store that instead of a tuple?)
1653 # (why not only store that instead of a tuple?)
1649 copied = fctx.renamed()
1654 copied = fctx.renamed()
1650 if copied:
1655 if copied:
1651 copied = copied[0]
1656 copied = copied[0]
1652 return memfilectx(repo, path, fctx.data(),
1657 return memfilectx(repo, path, fctx.data(),
1653 islink=fctx.islink(), isexec=fctx.isexec(),
1658 islink=fctx.islink(), isexec=fctx.isexec(),
1654 copied=copied, memctx=memctx)
1659 copied=copied, memctx=memctx)
1655 self._filectxfn = getfilectx
1660 self._filectxfn = getfilectx
1656
1661
1657 self._extra = extra and extra.copy() or {}
1662 self._extra = extra and extra.copy() or {}
1658 if self._extra.get('branch', '') == '':
1663 if self._extra.get('branch', '') == '':
1659 self._extra['branch'] = 'default'
1664 self._extra['branch'] = 'default'
1660
1665
1661 if editor:
1666 if editor:
1662 self._text = editor(self._repo, self, [])
1667 self._text = editor(self._repo, self, [])
1663 self._repo.savecommitmessage(self._text)
1668 self._repo.savecommitmessage(self._text)
1664
1669
1665 def filectx(self, path, filelog=None):
1670 def filectx(self, path, filelog=None):
1666 """get a file context from the working directory
1671 """get a file context from the working directory
1667
1672
1668 Returns None if file doesn't exist and should be removed."""
1673 Returns None if file doesn't exist and should be removed."""
1669 return self._filectxfn(self._repo, self, path)
1674 return self._filectxfn(self._repo, self, path)
1670
1675
1671 def commit(self):
1676 def commit(self):
1672 """commit context to the repo"""
1677 """commit context to the repo"""
1673 return self._repo.commitctx(self)
1678 return self._repo.commitctx(self)
1674
1679
1675 @propertycache
1680 @propertycache
1676 def _manifest(self):
1681 def _manifest(self):
1677 """generate a manifest based on the return values of filectxfn"""
1682 """generate a manifest based on the return values of filectxfn"""
1678
1683
1679 # keep this simple for now; just worry about p1
1684 # keep this simple for now; just worry about p1
1680 pctx = self._parents[0]
1685 pctx = self._parents[0]
1681 man = pctx.manifest().copy()
1686 man = pctx.manifest().copy()
1682
1687
1683 for f, fnode in man.iteritems():
1688 for f, fnode in man.iteritems():
1684 p1node = nullid
1689 p1node = nullid
1685 p2node = nullid
1690 p2node = nullid
1686 p = pctx[f].parents() # if file isn't in pctx, check p2?
1691 p = pctx[f].parents() # if file isn't in pctx, check p2?
1687 if len(p) > 0:
1692 if len(p) > 0:
1688 p1node = p[0].node()
1693 p1node = p[0].node()
1689 if len(p) > 1:
1694 if len(p) > 1:
1690 p2node = p[1].node()
1695 p2node = p[1].node()
1691 man[f] = revlog.hash(self[f].data(), p1node, p2node)
1696 man[f] = revlog.hash(self[f].data(), p1node, p2node)
1692
1697
1693 return man
1698 return man
1694
1699
1695
1700
1696 class memfilectx(committablefilectx):
1701 class memfilectx(committablefilectx):
1697 """memfilectx represents an in-memory file to commit.
1702 """memfilectx represents an in-memory file to commit.
1698
1703
1699 See memctx and committablefilectx for more details.
1704 See memctx and committablefilectx for more details.
1700 """
1705 """
1701 def __init__(self, repo, path, data, islink=False,
1706 def __init__(self, repo, path, data, islink=False,
1702 isexec=False, copied=None, memctx=None):
1707 isexec=False, copied=None, memctx=None):
1703 """
1708 """
1704 path is the normalized file path relative to repository root.
1709 path is the normalized file path relative to repository root.
1705 data is the file content as a string.
1710 data is the file content as a string.
1706 islink is True if the file is a symbolic link.
1711 islink is True if the file is a symbolic link.
1707 isexec is True if the file is executable.
1712 isexec is True if the file is executable.
1708 copied is the source file path if current file was copied in the
1713 copied is the source file path if current file was copied in the
1709 revision being committed, or None."""
1714 revision being committed, or None."""
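# A brief sketch of how a move is expressed via 'copied' (paths are
# illustrative): in filectxfn, report the old path as removed and return,
# for the new path, something like
#
#   memfilectx(repo, 'new/name.txt', data, copied='old/name.txt')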
1710 super(memfilectx, self).__init__(repo, path, None, memctx)
1715 super(memfilectx, self).__init__(repo, path, None, memctx)
1711 self._data = data
1716 self._data = data
1712 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1717 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1713 self._copied = None
1718 self._copied = None
1714 if copied:
1719 if copied:
1715 self._copied = (copied, nullid)
1720 self._copied = (copied, nullid)
1716
1721
1717 def data(self):
1722 def data(self):
1718 return self._data
1723 return self._data
1719 def size(self):
1724 def size(self):
1720 return len(self.data())
1725 return len(self.data())
1721 def flags(self):
1726 def flags(self):
1722 return self._flags
1727 return self._flags
1723 def renamed(self):
1728 def renamed(self):
1724 return self._copied
1729 return self._copied
1725
1730
1726 def remove(self, ignoremissing=False):
1731 def remove(self, ignoremissing=False):
1727 """wraps unlink for a repo's working directory"""
1732 """wraps unlink for a repo's working directory"""
1728 # need to figure out what to do here
1733 # need to figure out what to do here
1729 del self._changectx[self._path]
1734 del self._changectx[self._path]
1730
1735
1731 def write(self, data, flags):
1736 def write(self, data, flags):
1732 """wraps repo.wwrite"""
1737 """wraps repo.wwrite"""
1733 self._data = data
1738 self._data = data