merge with stable
Augie Fackler
r36708:b529e640 merge default
@@ -1,2748 +1,2749 @@
# context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from __future__ import absolute_import

import errno
import filecmp
import os
import re
import stat

from .i18n import _
from .node import (
    addednodeid,
    bin,
    hex,
    modifiednodeid,
    nullid,
    nullrev,
    short,
    wdirid,
    wdirnodes,
    wdirrev,
)
from .thirdparty import (
    attr,
)
from . import (
    encoding,
    error,
    fileset,
    match as matchmod,
    mdiff,
    obsolete as obsmod,
    obsutil,
    patch,
    pathutil,
    phases,
    pycompat,
    repoview,
    revlog,
    scmutil,
    sparse,
    subrepo,
    subrepoutil,
    util,
)
from .utils import dateutil

propertycache = util.propertycache

nonascii = re.compile(r'[^\x21-\x7f]').search

class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
                be committed,
    memctx: a context that represents changes in-memory and can also
            be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __bytes__(self):
        return short(self.node())

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return r"<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        return key in self._manifest

    def __getitem__(self, key):
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _buildstatusmanifest(self, status):
        """Builds a manifest that includes the given status results, if this is
        a working copy context. For non-working copy contexts, it just returns
        the normal manifest."""
        return self.manifest()

    def _matchstatus(self, other, match):
        """This internal method provides a way for child objects to override the
        match operator.
        """
        return match

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        mf2 = None
        if self.rev() is not None and self.rev() < other.rev():
            mf2 = self._buildstatusmanifest(s)
        mf1 = other._buildstatusmanifest(s)
        if mf2 is None:
            mf2 = self._buildstatusmanifest(s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, match=match, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                continue
            if value is None:
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 not in wdirnodes:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1 and
                       (not match or match(fn))]
            ignored = [fn for fn in ignored if fn not in mf1 and
                       (not match or match(fn))]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

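    # Illustrative note (added for clarity, not part of the original file):
    # the manifest diff consumed above maps each path to a pair of
    # (node, flag) tuples, or to None for paths only reported because
    # clean=listclean was requested. A hypothetical result might look like:
    #
    #     {'added.py':    (None, (node2, '')),         # added
    #      'removed.py':  ((node1, ''), None),         # removed
    #      'chmodded.sh': ((node1, ''), (node1, 'x')), # flag change -> modified
    #      'edited.txt':  ((node1, ''), (node2, '')),  # content change
    #      'same.c':      None}                        # clean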
    @propertycache
    def substate(self):
        return subrepoutil.state(self, self._repo.ui)

    def subrev(self, subpath):
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def manifestctx(self):
        return self._manifestctx
    def repo(self):
        return self._repo
    def phasestr(self):
        return phases.phasenames[self.phase()]
    def mutable(self):
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def orphan(self):
        """True if the changeset is not obsolete but its ancestors are"""
        return self.rev() in obsmod.getrevs(self._repo, 'orphan')

    def phasedivergent(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'phasedivergent')

    def contentdivergent(self):
        """Is a successor of a changeset with multiple possible successor sets

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'contentdivergent')

    def isunstable(self):
        """True if the changeset is either orphan, phase-divergent or
        content-divergent"""
        return self.orphan() or self.phasedivergent() or self.contentdivergent()

    def instabilities(self):
        """return the list of instabilities affecting this changeset.

        Instabilities are returned as strings. possible values are:
        - orphan,
        - phase-divergent,
        - content-divergent.
        """
        instabilities = []
        if self.orphan():
            instabilities.append('orphan')
        if self.phasedivergent():
            instabilities.append('phase-divergent')
        if self.contentdivergent():
            instabilities.append('content-divergent')
        return instabilities

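    # Illustrative usage sketch (not part of the original file): for a
    # changeset that is both an orphan and phase-divergent, the two helpers
    # above would report:
    #
    #     >>> ctx.isunstable()
    #     True
    #     >>> ctx.instabilities()
    #     ['orphan', 'phase-divergent']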
    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        if r'_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if r'_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        mfl = self._repo.manifestlog
        try:
            node, flag = mfl[self._changeset.manifest].find(path)
        except KeyError:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=None, include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, pycompat.byteskwargs(opts))
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

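    # Illustrative usage sketch (not part of the original file; 'git' is only
    # an assumed example of a diff option): joining the generator returned by
    # diff() yields the patch text of this changeset against its first parent.
    #
    #     >>> patchtext = b''.join(repo['tip'].diff(git=True))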
    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do is
                    # ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        for l in r:
            l.sort()

        return r

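    # Illustrative usage sketch (not part of the original file): the fast path
    # discussed above, comparing the working directory with its first parent.
    #
    #     >>> wctx = repo[None]
    #     >>> st = wctx.status(wctx.p1(), listclean=True)
    #     >>> st.modified, st.added, st.removed, st.clean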
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if repo.filtername.startswith('visible'):

        # Check if the changeset is obsolete
        unfilteredrepo = repo.unfiltered()
        ctx = unfilteredrepo[changeid]

        # If the changeset is obsolete, enrich the message with the reason
        # that made this changeset not visible
        if ctx.obsolete():
            msg = obsutil._getfilteredreason(repo, changeid, ctx)
        else:
            msg = _("hidden revision '%s'") % changeid

        hint = _('use --hidden to access hidden revisions')

        return error.FilteredRepoLookupError(msg, hint=hint)
    msg = _("filtered revision '%s' (not in '%s' subset)")
    msg %= (changeid, repo.filtername)
    return error.FilteredRepoLookupError(msg)

class changectx(basectx):
    """A changecontext object makes access to data related to a particular
    changeset convenient. It represents a read-only context already present in
    the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag"""

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if not pycompat.ispy3 and isinstance(changeid, long):
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if (changeid == '.'
                or repo.local() and changeid == repo.dirstate.p1()):
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            if len(changeid) == 20:
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            try:
                r = int(changeid)
                if '%d' % r != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l and r != wdirrev:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            if len(changeid) == 40:
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if (repo.local()
                and changeid in repo.unfiltered().dirstate.parents()):
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            raise _filterederror(repo, changeid)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)

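    # Illustrative usage sketch (not part of the original file; the hex prefix
    # below is only an example): the lookup cascade above is normally reached
    # through repo[...] and accepts several changeid forms:
    #
    #     >>> repo[0]             # local revision number
    #     >>> repo['tip']         # special name
    #     >>> repo['.']           # working directory parent
    #     >>> repo['null']        # the null revision
    #     >>> repo['b529e640']    # hex prefix, resolved via _partialmatch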
    def __hash__(self):
        try:
            return hash(self._rev)
        except AttributeError:
            return id(self)

    def __nonzero__(self):
        return self._rev != nullrev

    __bool__ = __nonzero__

    @propertycache
    def _changeset(self):
        return self._repo.changelog.changelogrevision(self.rev())

    @propertycache
    def _manifest(self):
        return self._manifestctx.read()

    @property
    def _manifestctx(self):
        return self._repo.manifestlog[self._changeset.manifest]

    @propertycache
    def _manifestdelta(self):
        return self._manifestctx.readdelta()

    @propertycache
    def _parents(self):
        repo = self._repo
        p1, p2 = repo.changelog.parentrevs(self._rev)
        if p2 == nullrev:
            return [changectx(repo, p1)]
        return [changectx(repo, p1), changectx(repo, p2)]

    def changeset(self):
        c = self._changeset
        return (
            c.manifest,
            c.user,
            c.date,
            c.files,
            c.description,
            c.extra,
        )
    def manifestnode(self):
        return self._changeset.manifest

    def user(self):
        return self._changeset.user
    def date(self):
        return self._changeset.date
    def files(self):
        return self._changeset.files
    def description(self):
        return self._changeset.description
    def branch(self):
        return encoding.tolocal(self._changeset.extra.get("branch"))
    def closesbranch(self):
        return 'close' in self._changeset.extra
    def extra(self):
        """Return a dict of extra information."""
        return self._changeset.extra
    def tags(self):
        """Return a list of byte tag names"""
        return self._repo.nodetags(self._node)
    def bookmarks(self):
        """Return a list of byte bookmark names."""
        return self._repo.nodebookmarks(self._node)
    def phase(self):
        return self._repo._phasecache.phase(self._repo, self._rev)
    def hidden(self):
        return self._rev in repoview.filterrevs(self._repo, 'visible')

    def isinmemory(self):
        return False

    def children(self):
        """return list of changectx contexts for each child changeset.

        This returns only the immediate child changesets. Use descendants() to
        recursively walk children.
        """
        c = self._repo.changelog.children(self._node)
        return [changectx(self._repo, x) for x in c]

    def ancestors(self):
        for a in self._repo.changelog.ancestors([self._rev]):
            yield changectx(self._repo, a)

    def descendants(self):
        """Recursively yield all children of the changeset.

        For just the immediate children, use children()
        """
        for d in self._repo.changelog.descendants([self._rev]):
            yield changectx(self._repo, d)

    def filectx(self, path, fileid=None, filelog=None):
        """get a file context from this changeset"""
        if fileid is None:
            fileid = self.filenode(path)
        return filectx(self._repo, path, fileid=fileid,
                       changectx=self, filelog=filelog)

    def ancestor(self, c2, warn=False):
        """return the "best" ancestor context of self and c2

        If there are multiple candidates, it will show a message and check
        merge.preferancestor configuration before falling back to the
        revlog ancestor."""
        # deal with workingctxs
        n2 = c2._node
        if n2 is None:
            n2 = c2._parents[0]._node
        cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
        if not cahs:
            anc = nullid
        elif len(cahs) == 1:
            anc = cahs[0]
        else:
            # experimental config: merge.preferancestor
            for r in self._repo.ui.configlist('merge', 'preferancestor'):
                try:
                    ctx = changectx(self._repo, r)
                except error.RepoLookupError:
                    continue
                anc = ctx.node()
                if anc in cahs:
                    break
            else:
                anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
        return changectx(self._repo, anc)

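    # Illustrative usage sketch (not part of the original file; the branch
    # names are assumed): picking the "best" common ancestor of two heads,
    # optionally influenced by the experimental merge.preferancestor setting
    # mentioned above.
    #
    #     >>> anc = repo['default'].ancestor(repo['stable'], warn=True)
    #     >>> anc.hex()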
    def descendant(self, other):
        """True if other is a descendant of this changeset"""
        return self._repo.changelog.descendant(self._rev, other._rev)

    def walk(self, match):
        '''Generates matching file names.'''

        # Wrap match.bad method to have message with nodeid
        def bad(fn, msg):
            # The manifest doesn't know about subrepos, so don't complain about
            # paths into valid subrepos.
            if any(fn == s or fn.startswith(s + '/')
                   for s in self.substate):
                return
            match.bad(fn, _('no such file in rev %s') % self)

        m = matchmod.badmatch(match, bad)
        return self._manifest.walk(m)

    def matches(self, match):
        return self.walk(match)

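    # Illustrative usage sketch (not part of the original file): walking only
    # the Python files of a revision with a matcher built by match() above.
    #
    #     >>> ctx = repo['tip']
    #     >>> m = ctx.match(['glob:**.py'])
    #     >>> pyfiles = list(ctx.walk(m))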
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory,
    overlayfilectx: duplicate another filecontext with some fields overridden.
    """
    @propertycache
    def _filelog(self):
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if r'_changeid' in self.__dict__:
            return self._changeid
        elif r'_changectx' in self.__dict__:
            return self._changectx.rev()
        elif r'_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if r'_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    __bool__ = __nonzero__

    def __bytes__(self):
        try:
            return "%s@%s" % (self.path(), self._changectx)
        except error.LookupError:
            return "%s@???" % self.path()

    __str__ = encoding.strmethod(__bytes__)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    @propertycache
    def _flags(self):
        return self._changectx.flags(self._path)
    def flags(self):
        return self._flags
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def obsolete(self):
        return self._changectx.obsolete()
    def instabilities(self):
        return self._changectx.instabilities()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def renamed(self):
        return self._copied
    def repo(self):
        return self._repo
    def size(self):
        return len(self.data())

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        return False

    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True

    def _adjustlinkrev(self, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        mfl = repo.manifestlog
        # fetch the linkrev
        lkr = self.linkrev()
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            fnode = self._filenode
            path = self._path
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == mfl[ac[0]].readfast().get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if the manifest uses a buggy file revision (not a child of
            # the one it replaces) we could. Such a buggy situation will likely
            # result in a crash somewhere else at some point.
        return lkr

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it takes into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not (r'_changeid' in attrs or r'_changectx' in attrs)
        if noctx or self.rev() == lkr:
            return self.linkrev()
        return self._adjustlinkrev(self.rev(), inclusive=True)

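    # Illustrative note (not part of the original file): the 'linkrev
    # shadowing' mentioned above arises when an identical file revision (same
    # content and same file parents) is introduced by more than one changeset;
    # the filelog stores it once and linkrev() points at only one of the
    # introducing changesets, which may not be an ancestor of the changeset
    # this filectx was read from. In that case the two calls can disagree:
    #
    #     >>> fctx.linkrev()   # whatever changeset the filelog recorded
    #     >>> fctx.introrev()  # ancestor of fctx.rev() that introduced it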
920 def introfilectx(self):
920 def introfilectx(self):
921 """Return filectx having identical contents, but pointing to the
921 """Return filectx having identical contents, but pointing to the
922 changeset revision where this filectx was introduced"""
922 changeset revision where this filectx was introduced"""
923 introrev = self.introrev()
923 introrev = self.introrev()
924 if self.rev() == introrev:
924 if self.rev() == introrev:
925 return self
925 return self
926 return self.filectx(self.filenode(), changeid=introrev)
926 return self.filectx(self.filenode(), changeid=introrev)
927
927
928 def _parentfilectx(self, path, fileid, filelog):
928 def _parentfilectx(self, path, fileid, filelog):
929 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
929 """create parent filectx keeping ancestry info for _adjustlinkrev()"""
930 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
930 fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
931 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
931 if r'_changeid' in vars(self) or r'_changectx' in vars(self):
932 # If self is associated with a changeset (probably explicitly
932 # If self is associated with a changeset (probably explicitly
933 # fed), ensure the created filectx is associated with a
933 # fed), ensure the created filectx is associated with a
934 # changeset that is an ancestor of self.changectx.
934 # changeset that is an ancestor of self.changectx.
935 # This lets us later use _adjustlinkrev to get a correct link.
935 # This lets us later use _adjustlinkrev to get a correct link.
936 fctx._descendantrev = self.rev()
936 fctx._descendantrev = self.rev()
937 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
937 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
938 elif r'_descendantrev' in vars(self):
938 elif r'_descendantrev' in vars(self):
939 # Otherwise propagate _descendantrev if we have one associated.
939 # Otherwise propagate _descendantrev if we have one associated.
940 fctx._descendantrev = self._descendantrev
940 fctx._descendantrev = self._descendantrev
941 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
941 fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
942 return fctx
942 return fctx
943
943
944 def parents(self):
944 def parents(self):
945 _path = self._path
945 _path = self._path
946 fl = self._filelog
946 fl = self._filelog
947 parents = self._filelog.parents(self._filenode)
947 parents = self._filelog.parents(self._filenode)
948 pl = [(_path, node, fl) for node in parents if node != nullid]
948 pl = [(_path, node, fl) for node in parents if node != nullid]
949
949
950 r = fl.renamed(self._filenode)
950 r = fl.renamed(self._filenode)
951 if r:
951 if r:
952 # - In the simple rename case, both parents are nullid, pl is empty.
952 # - In the simple rename case, both parents are nullid, pl is empty.
953 # - In case of merge, only one of the parents is nullid and should
953 # - In case of merge, only one of the parents is nullid and should
954 # be replaced with the rename information. This parent is -always-
954 # be replaced with the rename information. This parent is -always-
955 # the first one.
955 # the first one.
956 #
956 #
957 # As nullid parents have always been filtered out by the previous list
957 # As nullid parents have always been filtered out by the previous list
958 # comprehension, inserting at index 0 will always result in replacing
958 # comprehension, inserting at index 0 will always result in replacing
959 # the first nullid parent with the rename information.
959 # the first nullid parent with the rename information.
960 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
960 pl.insert(0, (r[0], r[1], self._repo.file(r[0])))
961
961
962 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
962 return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]
963
963
964 def p1(self):
964 def p1(self):
965 return self.parents()[0]
965 return self.parents()[0]
966
966
967 def p2(self):
967 def p2(self):
968 p = self.parents()
968 p = self.parents()
969 if len(p) == 2:
969 if len(p) == 2:
970 return p[1]
970 return p[1]
971 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
971 return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)
972
972
973 def annotate(self, follow=False, linenumber=False, skiprevs=None,
973 def annotate(self, follow=False, linenumber=False, skiprevs=None,
974 diffopts=None):
974 diffopts=None):
975 '''returns a list of tuples of ((ctx, number), line) for each line
975 '''returns a list of tuples of ((ctx, number), line) for each line
976 in the file, where ctx is the filectx of the node where
976 in the file, where ctx is the filectx of the node where
977 that line was last changed; if the linenumber parameter is true, number is
977 that line was last changed; if the linenumber parameter is true, number is
978 the line number of its first appearance in the managed file; otherwise,
978 the line number of its first appearance in the managed file; otherwise,
979 number has a fixed value of False.
979 number has a fixed value of False.
980 '''
980 '''
981
981
982 def lines(text):
982 def lines(text):
983 if text.endswith("\n"):
983 if text.endswith("\n"):
984 return text.count("\n")
984 return text.count("\n")
985 return text.count("\n") + int(bool(text))
985 return text.count("\n") + int(bool(text))
986
986
987 if linenumber:
987 if linenumber:
988 def decorate(text, rev):
988 def decorate(text, rev):
989 return ([annotateline(fctx=rev, lineno=i)
989 return ([annotateline(fctx=rev, lineno=i)
990 for i in xrange(1, lines(text) + 1)], text)
990 for i in xrange(1, lines(text) + 1)], text)
991 else:
991 else:
992 def decorate(text, rev):
992 def decorate(text, rev):
993 return ([annotateline(fctx=rev)] * lines(text), text)
993 return ([annotateline(fctx=rev)] * lines(text), text)
994
994
995 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
995 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
996
996
997 def parents(f):
997 def parents(f):
998 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
998 # Cut _descendantrev here to mitigate the penalty of lazy linkrev
999 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
999 # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
1000 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1000 # from the topmost introrev (= srcrev) down to p.linkrev() if it
1001 # isn't an ancestor of the srcrev.
1001 # isn't an ancestor of the srcrev.
1002 f._changeid
1002 f._changeid
1003 pl = f.parents()
1003 pl = f.parents()
1004
1004
1005 # Don't return renamed parents if we aren't following.
1005 # Don't return renamed parents if we aren't following.
1006 if not follow:
1006 if not follow:
1007 pl = [p for p in pl if p.path() == f.path()]
1007 pl = [p for p in pl if p.path() == f.path()]
1008
1008
1009 # renamed filectx won't have a filelog yet, so set it
1009 # renamed filectx won't have a filelog yet, so set it
1010 # from the cache to save time
1010 # from the cache to save time
1011 for p in pl:
1011 for p in pl:
1012 if not r'_filelog' in p.__dict__:
1012 if not r'_filelog' in p.__dict__:
1013 p._filelog = getlog(p.path())
1013 p._filelog = getlog(p.path())
1014
1014
1015 return pl
1015 return pl
1016
1016
1017 # use linkrev to find the first changeset where self appeared
1017 # use linkrev to find the first changeset where self appeared
1018 base = self.introfilectx()
1018 base = self.introfilectx()
1019 if getattr(base, '_ancestrycontext', None) is None:
1019 if getattr(base, '_ancestrycontext', None) is None:
1020 cl = self._repo.changelog
1020 cl = self._repo.changelog
1021 if base.rev() is None:
1021 if base.rev() is None:
1022 # wctx is not inclusive, but works because _ancestrycontext
1022 # wctx is not inclusive, but works because _ancestrycontext
1023 # is used to test filelog revisions
1023 # is used to test filelog revisions
1024 ac = cl.ancestors([p.rev() for p in base.parents()],
1024 ac = cl.ancestors([p.rev() for p in base.parents()],
1025 inclusive=True)
1025 inclusive=True)
1026 else:
1026 else:
1027 ac = cl.ancestors([base.rev()], inclusive=True)
1027 ac = cl.ancestors([base.rev()], inclusive=True)
1028 base._ancestrycontext = ac
1028 base._ancestrycontext = ac
1029
1029
1030 # This algorithm would prefer to be recursive, but Python is a
1030 # This algorithm would prefer to be recursive, but Python is a
1031 # bit recursion-hostile. Instead we do an iterative
1031 # bit recursion-hostile. Instead we do an iterative
1032 # depth-first search.
1032 # depth-first search.
1033
1033
1034 # 1st DFS pre-calculates pcache and needed
1034 # 1st DFS pre-calculates pcache and needed
1035 visit = [base]
1035 visit = [base]
1036 pcache = {}
1036 pcache = {}
1037 needed = {base: 1}
1037 needed = {base: 1}
1038 while visit:
1038 while visit:
1039 f = visit.pop()
1039 f = visit.pop()
1040 if f in pcache:
1040 if f in pcache:
1041 continue
1041 continue
1042 pl = parents(f)
1042 pl = parents(f)
1043 pcache[f] = pl
1043 pcache[f] = pl
1044 for p in pl:
1044 for p in pl:
1045 needed[p] = needed.get(p, 0) + 1
1045 needed[p] = needed.get(p, 0) + 1
1046 if p not in pcache:
1046 if p not in pcache:
1047 visit.append(p)
1047 visit.append(p)
1048
1048
1049 # 2nd DFS does the actual annotate
1049 # 2nd DFS does the actual annotate
1050 visit[:] = [base]
1050 visit[:] = [base]
1051 hist = {}
1051 hist = {}
1052 while visit:
1052 while visit:
1053 f = visit[-1]
1053 f = visit[-1]
1054 if f in hist:
1054 if f in hist:
1055 visit.pop()
1055 visit.pop()
1056 continue
1056 continue
1057
1057
1058 ready = True
1058 ready = True
1059 pl = pcache[f]
1059 pl = pcache[f]
1060 for p in pl:
1060 for p in pl:
1061 if p not in hist:
1061 if p not in hist:
1062 ready = False
1062 ready = False
1063 visit.append(p)
1063 visit.append(p)
1064 if ready:
1064 if ready:
1065 visit.pop()
1065 visit.pop()
1066 curr = decorate(f.data(), f)
1066 curr = decorate(f.data(), f)
1067 skipchild = False
1067 skipchild = False
1068 if skiprevs is not None:
1068 if skiprevs is not None:
1069 skipchild = f._changeid in skiprevs
1069 skipchild = f._changeid in skiprevs
1070 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1070 curr = _annotatepair([hist[p] for p in pl], f, curr, skipchild,
1071 diffopts)
1071 diffopts)
1072 for p in pl:
1072 for p in pl:
1073 if needed[p] == 1:
1073 if needed[p] == 1:
1074 del hist[p]
1074 del hist[p]
1075 del needed[p]
1075 del needed[p]
1076 else:
1076 else:
1077 needed[p] -= 1
1077 needed[p] -= 1
1078
1078
1079 hist[f] = curr
1079 hist[f] = curr
1080 del pcache[f]
1080 del pcache[f]
1081
1081
1082 return pycompat.ziplist(hist[base][0], hist[base][1].splitlines(True))
1082 lineattrs, text = hist[base]
1083 return pycompat.ziplist(lineattrs, mdiff.splitnewlines(text))
1083
1084
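# Usage sketch for annotate() (illustrative; the checkout at "." and
# the file name "README" are assumptions). Each returned item pairs an
# annotation record with the line text; with linenumber=True the record
# also carries the line number of the line's first appearance:
#
#   from mercurial import hg, ui as uimod
#   repo = hg.repository(uimod.ui.load(), '.')
#   fctx = repo['tip']['README']
#   for ann, line in fctx.annotate(linenumber=True):
#       print('%d:%d: %s' % (ann.fctx.rev(), ann.lineno, line.rstrip()))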
1084 def ancestors(self, followfirst=False):
1085 def ancestors(self, followfirst=False):
1085 visit = {}
1086 visit = {}
1086 c = self
1087 c = self
1087 if followfirst:
1088 if followfirst:
1088 cut = 1
1089 cut = 1
1089 else:
1090 else:
1090 cut = None
1091 cut = None
1091
1092
1092 while True:
1093 while True:
1093 for parent in c.parents()[:cut]:
1094 for parent in c.parents()[:cut]:
1094 visit[(parent.linkrev(), parent.filenode())] = parent
1095 visit[(parent.linkrev(), parent.filenode())] = parent
1095 if not visit:
1096 if not visit:
1096 break
1097 break
1097 c = visit.pop(max(visit))
1098 c = visit.pop(max(visit))
1098 yield c
1099 yield c
1099
1100
1100 def decodeddata(self):
1101 def decodeddata(self):
1101 """Returns `data()` after running repository decoding filters.
1102 """Returns `data()` after running repository decoding filters.
1102
1103
1103 This is often equivalent to how the data would be expressed on disk.
1104 This is often equivalent to how the data would be expressed on disk.
1104 """
1105 """
1105 return self._repo.wwritedata(self.path(), self.data())
1106 return self._repo.wwritedata(self.path(), self.data())
1106
1107
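# Sketch contrasting data() (the repository-internal form) with
# decodeddata() (after decode filters such as EOL conversion run).
# Illustrative only; the checkout at "." and the file name are
# assumptions, and without decode filters both calls usually match:
#
#   from mercurial import hg, ui as uimod
#   repo = hg.repository(uimod.ui.load(), '.')
#   fctx = repo['tip']['README']
#   print('%d raw, %d decoded' % (len(fctx.data()), len(fctx.decodeddata())))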
1107 @attr.s(slots=True, frozen=True)
1108 @attr.s(slots=True, frozen=True)
1108 class annotateline(object):
1109 class annotateline(object):
1109 fctx = attr.ib()
1110 fctx = attr.ib()
1110 lineno = attr.ib(default=False)
1111 lineno = attr.ib(default=False)
1111 # Whether this annotation was the result of a skip-annotate.
1112 # Whether this annotation was the result of a skip-annotate.
1112 skip = attr.ib(default=False)
1113 skip = attr.ib(default=False)
1113
1114
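# annotateline is a frozen attrs class, so _annotatepair() below derives
# modified copies with attr.evolve() instead of mutating in place. A
# standalone sketch (the fctx=None placeholder is illustrative):
#
#   from mercurial.context import annotateline
#   from mercurial.thirdparty import attr
#   line = annotateline(fctx=None, lineno=7)
#   skipped = attr.evolve(line, skip=True)
#   assert skipped.lineno == 7 and skipped.skip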
1114 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
1115 def _annotatepair(parents, childfctx, child, skipchild, diffopts):
1115 r'''
1116 r'''
1116 Given parent and child fctxes and annotate data for parents, for all lines
1117 Given parent and child fctxes and annotate data for parents, for all lines
1117 in either parent that match the child, annotate the child with the parent's
1118 in either parent that match the child, annotate the child with the parent's
1118 data.
1119 data.
1119
1120
1120 Additionally, if `skipchild` is True, replace all other lines with parent
1121 Additionally, if `skipchild` is True, replace all other lines with parent
1121 annotate data as well such that child is never blamed for any lines.
1122 annotate data as well such that child is never blamed for any lines.
1122
1123
1123 See test-annotate.py for unit tests.
1124 See test-annotate.py for unit tests.
1124 '''
1125 '''
1125 pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
1126 pblocks = [(parent, mdiff.allblocks(parent[1], child[1], opts=diffopts))
1126 for parent in parents]
1127 for parent in parents]
1127
1128
1128 if skipchild:
1129 if skipchild:
1129 # Need to iterate over the blocks twice -- make it a list
1130 # Need to iterate over the blocks twice -- make it a list
1130 pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
1131 pblocks = [(p, list(blocks)) for (p, blocks) in pblocks]
1131 # Mercurial currently prefers p2 over p1 for annotate.
1132 # Mercurial currently prefers p2 over p1 for annotate.
1132 # TODO: change this?
1133 # TODO: change this?
1133 for parent, blocks in pblocks:
1134 for parent, blocks in pblocks:
1134 for (a1, a2, b1, b2), t in blocks:
1135 for (a1, a2, b1, b2), t in blocks:
1135 # Changed blocks ('!') or blocks made only of blank lines ('~')
1136 # Changed blocks ('!') or blocks made only of blank lines ('~')
1136 # belong to the child.
1137 # belong to the child.
1137 if t == '=':
1138 if t == '=':
1138 child[0][b1:b2] = parent[0][a1:a2]
1139 child[0][b1:b2] = parent[0][a1:a2]
1139
1140
1140 if skipchild:
1141 if skipchild:
1141 # Now try and match up anything that couldn't be matched.
1142 # Now try and match up anything that couldn't be matched.
1142 # Reversing pblocks maintains the bias towards p2, matching the
1143 # Reversing pblocks maintains the bias towards p2, matching the
1143 # behavior above.
1144 # behavior above.
1144 pblocks.reverse()
1145 pblocks.reverse()
1145
1146
1146 # The heuristics are:
1147 # The heuristics are:
1147 # * Work on blocks of changed lines (effectively diff hunks with -U0).
1148 # * Work on blocks of changed lines (effectively diff hunks with -U0).
1148 # This could potentially be smarter but works well enough.
1149 # This could potentially be smarter but works well enough.
1149 # * For a non-matching section, do a best-effort fit. Match lines in
1150 # * For a non-matching section, do a best-effort fit. Match lines in
1150 # diff hunks 1:1, dropping lines as necessary.
1151 # diff hunks 1:1, dropping lines as necessary.
1151 # * Repeat the last line as a last resort.
1152 # * Repeat the last line as a last resort.
1152
1153
1153 # First, replace as much as possible without repeating the last line.
1154 # First, replace as much as possible without repeating the last line.
1154 remaining = [(parent, []) for parent, _blocks in pblocks]
1155 remaining = [(parent, []) for parent, _blocks in pblocks]
1155 for idx, (parent, blocks) in enumerate(pblocks):
1156 for idx, (parent, blocks) in enumerate(pblocks):
1156 for (a1, a2, b1, b2), _t in blocks:
1157 for (a1, a2, b1, b2), _t in blocks:
1157 if a2 - a1 >= b2 - b1:
1158 if a2 - a1 >= b2 - b1:
1158 for bk in xrange(b1, b2):
1159 for bk in xrange(b1, b2):
1159 if child[0][bk].fctx == childfctx:
1160 if child[0][bk].fctx == childfctx:
1160 ak = min(a1 + (bk - b1), a2 - 1)
1161 ak = min(a1 + (bk - b1), a2 - 1)
1161 child[0][bk] = attr.evolve(parent[0][ak], skip=True)
1162 child[0][bk] = attr.evolve(parent[0][ak], skip=True)
1162 else:
1163 else:
1163 remaining[idx][1].append((a1, a2, b1, b2))
1164 remaining[idx][1].append((a1, a2, b1, b2))
1164
1165
1165 # Then, look at anything left, which might involve repeating the last
1166 # Then, look at anything left, which might involve repeating the last
1166 # line.
1167 # line.
1167 for parent, blocks in remaining:
1168 for parent, blocks in remaining:
1168 for a1, a2, b1, b2 in blocks:
1169 for a1, a2, b1, b2 in blocks:
1169 for bk in xrange(b1, b2):
1170 for bk in xrange(b1, b2):
1170 if child[0][bk].fctx == childfctx:
1171 if child[0][bk].fctx == childfctx:
1171 ak = min(a1 + (bk - b1), a2 - 1)
1172 ak = min(a1 + (bk - b1), a2 - 1)
1172 child[0][bk] = attr.evolve(parent[0][ak], skip=True)
1173 child[0][bk] = attr.evolve(parent[0][ak], skip=True)
1173 return child
1174 return child
1174
1175
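# _annotatepair() consumes the output of mdiff.allblocks(), which yields
# ((a1, a2, b1, b2), type) pairs: '=' for ranges that match, '!' for
# changed ranges, '~' for blocks made only of blank lines. A standalone
# sketch of that input shape (the sample texts are illustrative):
#
#   from mercurial import mdiff
#   old = b'one\ntwo\nthree\n'
#   new = b'one\nTWO\nthree\n'
#   for (a1, a2, b1, b2), kind in mdiff.allblocks(old, new):
#       print('%s a[%d:%d] b[%d:%d]' % (kind, a1, a2, b1, b2))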
1175 class filectx(basefilectx):
1176 class filectx(basefilectx):
1176 """A filecontext object makes access to data related to a particular
1177 """A filecontext object makes access to data related to a particular
1177 filerevision convenient."""
1178 filerevision convenient."""
1178 def __init__(self, repo, path, changeid=None, fileid=None,
1179 def __init__(self, repo, path, changeid=None, fileid=None,
1179 filelog=None, changectx=None):
1180 filelog=None, changectx=None):
1180 """changeid can be a changeset revision, node, or tag.
1181 """changeid can be a changeset revision, node, or tag.
1181 fileid can be a file revision or node."""
1182 fileid can be a file revision or node."""
1182 self._repo = repo
1183 self._repo = repo
1183 self._path = path
1184 self._path = path
1184
1185
1185 assert (changeid is not None
1186 assert (changeid is not None
1186 or fileid is not None
1187 or fileid is not None
1187 or changectx is not None), \
1188 or changectx is not None), \
1188 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1189 ("bad args: changeid=%r, fileid=%r, changectx=%r"
1189 % (changeid, fileid, changectx))
1190 % (changeid, fileid, changectx))
1190
1191
1191 if filelog is not None:
1192 if filelog is not None:
1192 self._filelog = filelog
1193 self._filelog = filelog
1193
1194
1194 if changeid is not None:
1195 if changeid is not None:
1195 self._changeid = changeid
1196 self._changeid = changeid
1196 if changectx is not None:
1197 if changectx is not None:
1197 self._changectx = changectx
1198 self._changectx = changectx
1198 if fileid is not None:
1199 if fileid is not None:
1199 self._fileid = fileid
1200 self._fileid = fileid
1200
1201
1201 @propertycache
1202 @propertycache
1202 def _changectx(self):
1203 def _changectx(self):
1203 try:
1204 try:
1204 return changectx(self._repo, self._changeid)
1205 return changectx(self._repo, self._changeid)
1205 except error.FilteredRepoLookupError:
1206 except error.FilteredRepoLookupError:
1206 # Linkrev may point to any revision in the repository. When the
1207 # Linkrev may point to any revision in the repository. When the
1207 # repository is filtered this may lead to `filectx` trying to build
1208 # repository is filtered this may lead to `filectx` trying to build
1208 # `changectx` for a filtered revision. In such a case we fall back to
1209 # `changectx` for a filtered revision. In such a case we fall back to
1209 # creating `changectx` on the unfiltered version of the repository.
1210 # creating `changectx` on the unfiltered version of the repository.
1210 # This fallback should not be an issue because `changectx` objects
1211 # This fallback should not be an issue because `changectx` objects
1211 # created from `filectx` are not used in complex operations that care
1212 # created from `filectx` are not used in complex operations that care
1212 # about filtering.
1213 # about filtering.
1213 #
1214 #
1214 # This fallback is a cheap and dirty fix that prevents several
1215 # This fallback is a cheap and dirty fix that prevents several
1215 # crashes. It does not ensure the behavior is correct. However the
1216 # crashes. It does not ensure the behavior is correct. However the
1216 # behavior was not correct before filtering either, and "incorrect
1217 # behavior was not correct before filtering either, and "incorrect
1217 # behavior" is seen as better than "crash".
1218 # behavior" is seen as better than "crash".
1218 #
1219 #
1219 # Linkrevs have several serious problems with filtering that are
1220 # Linkrevs have several serious problems with filtering that are
1220 # complicated to solve. Proper handling of the issue here should be
1221 # complicated to solve. Proper handling of the issue here should be
1221 # considered once solving the linkrev issues is on the table.
1222 # considered once solving the linkrev issues is on the table.
1222 return changectx(self._repo.unfiltered(), self._changeid)
1223 return changectx(self._repo.unfiltered(), self._changeid)
1223
1224
1224 def filectx(self, fileid, changeid=None):
1225 def filectx(self, fileid, changeid=None):
1225 '''opens an arbitrary revision of the file without
1226 '''opens an arbitrary revision of the file without
1226 opening a new filelog'''
1227 opening a new filelog'''
1227 return filectx(self._repo, self._path, fileid=fileid,
1228 return filectx(self._repo, self._path, fileid=fileid,
1228 filelog=self._filelog, changeid=changeid)
1229 filelog=self._filelog, changeid=changeid)
1229
1230
1230 def rawdata(self):
1231 def rawdata(self):
1231 return self._filelog.revision(self._filenode, raw=True)
1232 return self._filelog.revision(self._filenode, raw=True)
1232
1233
1233 def rawflags(self):
1234 def rawflags(self):
1234 """low-level revlog flags"""
1235 """low-level revlog flags"""
1235 return self._filelog.flags(self._filerev)
1236 return self._filelog.flags(self._filerev)
1236
1237
1237 def data(self):
1238 def data(self):
1238 try:
1239 try:
1239 return self._filelog.read(self._filenode)
1240 return self._filelog.read(self._filenode)
1240 except error.CensoredNodeError:
1241 except error.CensoredNodeError:
1241 if self._repo.ui.config("censor", "policy") == "ignore":
1242 if self._repo.ui.config("censor", "policy") == "ignore":
1242 return ""
1243 return ""
1243 raise error.Abort(_("censored node: %s") % short(self._filenode),
1244 raise error.Abort(_("censored node: %s") % short(self._filenode),
1244 hint=_("set censor.policy to ignore errors"))
1245 hint=_("set censor.policy to ignore errors"))
1245
1246
1246 def size(self):
1247 def size(self):
1247 return self._filelog.size(self._filerev)
1248 return self._filelog.size(self._filerev)
1248
1249
1249 @propertycache
1250 @propertycache
1250 def _copied(self):
1251 def _copied(self):
1251 """check if file was actually renamed in this changeset revision
1252 """check if file was actually renamed in this changeset revision
1252
1253
1253 If a rename is logged in the file revision, we report the copy for the
1254 If a rename is logged in the file revision, we report the copy for the
1254 changeset only if the file revision's linkrev points back to the changeset
1255 changeset only if the file revision's linkrev points back to the changeset
1255 in question or if both changeset parents contain different file revisions.
1256 in question or if both changeset parents contain different file revisions.
1256 """
1257 """
1257
1258
1258 renamed = self._filelog.renamed(self._filenode)
1259 renamed = self._filelog.renamed(self._filenode)
1259 if not renamed:
1260 if not renamed:
1260 return renamed
1261 return renamed
1261
1262
1262 if self.rev() == self.linkrev():
1263 if self.rev() == self.linkrev():
1263 return renamed
1264 return renamed
1264
1265
1265 name = self.path()
1266 name = self.path()
1266 fnode = self._filenode
1267 fnode = self._filenode
1267 for p in self._changectx.parents():
1268 for p in self._changectx.parents():
1268 try:
1269 try:
1269 if fnode == p.filenode(name):
1270 if fnode == p.filenode(name):
1270 return None
1271 return None
1271 except error.LookupError:
1272 except error.LookupError:
1272 pass
1273 pass
1273 return renamed
1274 return renamed
1274
1275
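# Usage sketch for copy/rename detection via _copied (illustrative; the
# checkout at "." and the file name "NEWNAME", assumed to have been
# created by 'hg copy' or 'hg rename', are assumptions):
#
#   from mercurial import hg, ui as uimod
#   repo = hg.repository(uimod.ui.load(), '.')
#   fctx = repo['tip']['NEWNAME']
#   r = fctx.renamed()   # backed by _copied; falsy when no copy applies
#   if r:
#       print('copied from %s' % r[0])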
1275 def children(self):
1276 def children(self):
1276 # hard for renames
1277 # hard for renames
1277 c = self._filelog.children(self._filenode)
1278 c = self._filelog.children(self._filenode)
1278 return [filectx(self._repo, self._path, fileid=x,
1279 return [filectx(self._repo, self._path, fileid=x,
1279 filelog=self._filelog) for x in c]
1280 filelog=self._filelog) for x in c]
1280
1281
1281 class committablectx(basectx):
1282 class committablectx(basectx):
1282 """A committablectx object provides common functionality for a context that
1283 """A committablectx object provides common functionality for a context that
1283 wants the ability to commit, e.g. workingctx or memctx."""
1284 wants the ability to commit, e.g. workingctx or memctx."""
1284 def __init__(self, repo, text="", user=None, date=None, extra=None,
1285 def __init__(self, repo, text="", user=None, date=None, extra=None,
1285 changes=None):
1286 changes=None):
1286 self._repo = repo
1287 self._repo = repo
1287 self._rev = None
1288 self._rev = None
1288 self._node = None
1289 self._node = None
1289 self._text = text
1290 self._text = text
1290 if date:
1291 if date:
1291 self._date = dateutil.parsedate(date)
1292 self._date = dateutil.parsedate(date)
1292 if user:
1293 if user:
1293 self._user = user
1294 self._user = user
1294 if changes:
1295 if changes:
1295 self._status = changes
1296 self._status = changes
1296
1297
1297 self._extra = {}
1298 self._extra = {}
1298 if extra:
1299 if extra:
1299 self._extra = extra.copy()
1300 self._extra = extra.copy()
1300 if 'branch' not in self._extra:
1301 if 'branch' not in self._extra:
1301 try:
1302 try:
1302 branch = encoding.fromlocal(self._repo.dirstate.branch())
1303 branch = encoding.fromlocal(self._repo.dirstate.branch())
1303 except UnicodeDecodeError:
1304 except UnicodeDecodeError:
1304 raise error.Abort(_('branch name not in UTF-8!'))
1305 raise error.Abort(_('branch name not in UTF-8!'))
1305 self._extra['branch'] = branch
1306 self._extra['branch'] = branch
1306 if self._extra['branch'] == '':
1307 if self._extra['branch'] == '':
1307 self._extra['branch'] = 'default'
1308 self._extra['branch'] = 'default'
1308
1309
1309 def __bytes__(self):
1310 def __bytes__(self):
1310 return bytes(self._parents[0]) + "+"
1311 return bytes(self._parents[0]) + "+"
1311
1312
1312 __str__ = encoding.strmethod(__bytes__)
1313 __str__ = encoding.strmethod(__bytes__)
1313
1314
1314 def __nonzero__(self):
1315 def __nonzero__(self):
1315 return True
1316 return True
1316
1317
1317 __bool__ = __nonzero__
1318 __bool__ = __nonzero__
1318
1319
1319 def _buildflagfunc(self):
1320 def _buildflagfunc(self):
1320 # Create a fallback function for getting file flags when the
1321 # Create a fallback function for getting file flags when the
1321 # filesystem doesn't support them
1322 # filesystem doesn't support them
1322
1323
1323 copiesget = self._repo.dirstate.copies().get
1324 copiesget = self._repo.dirstate.copies().get
1324 parents = self.parents()
1325 parents = self.parents()
1325 if len(parents) < 2:
1326 if len(parents) < 2:
1326 # when we have one parent, it's easy: copy from parent
1327 # when we have one parent, it's easy: copy from parent
1327 man = parents[0].manifest()
1328 man = parents[0].manifest()
1328 def func(f):
1329 def func(f):
1329 f = copiesget(f, f)
1330 f = copiesget(f, f)
1330 return man.flags(f)
1331 return man.flags(f)
1331 else:
1332 else:
1332 # merges are tricky: we try to reconstruct the unstored
1333 # merges are tricky: we try to reconstruct the unstored
1333 # result from the merge (issue1802)
1334 # result from the merge (issue1802)
1334 p1, p2 = parents
1335 p1, p2 = parents
1335 pa = p1.ancestor(p2)
1336 pa = p1.ancestor(p2)
1336 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1337 m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
1337
1338
1338 def func(f):
1339 def func(f):
1339 f = copiesget(f, f) # may be wrong for merges with copies
1340 f = copiesget(f, f) # may be wrong for merges with copies
1340 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1341 fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
1341 if fl1 == fl2:
1342 if fl1 == fl2:
1342 return fl1
1343 return fl1
1343 if fl1 == fla:
1344 if fl1 == fla:
1344 return fl2
1345 return fl2
1345 if fl2 == fla:
1346 if fl2 == fla:
1346 return fl1
1347 return fl1
1347 return '' # punt for conflicts
1348 return '' # punt for conflicts
1348
1349
1349 return func
1350 return func
1350
1351
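# The merge branch of _buildflagfunc() above resolves exec/symlink flags
# with a simple three-way rule: keep a flag both parents agree on,
# otherwise take the side that changed relative to the ancestor, and
# punt to '' on a real conflict. The same rule in isolation (the sample
# values are illustrative):
#
#   def mergeflags(fl1, fl2, fla):
#       if fl1 == fl2:
#           return fl1
#       if fl1 == fla:
#           return fl2
#       if fl2 == fla:
#           return fl1
#       return ''  # conflicting flag changes: punt
#
#   assert mergeflags('x', '', '') == 'x'  # only p1 set the exec bit
#   assert mergeflags('x', 'l', '') == ''  # both sides changed: conflict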
1351 @propertycache
1352 @propertycache
1352 def _flagfunc(self):
1353 def _flagfunc(self):
1353 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1354 return self._repo.dirstate.flagfunc(self._buildflagfunc)
1354
1355
1355 @propertycache
1356 @propertycache
1356 def _status(self):
1357 def _status(self):
1357 return self._repo.status()
1358 return self._repo.status()
1358
1359
1359 @propertycache
1360 @propertycache
1360 def _user(self):
1361 def _user(self):
1361 return self._repo.ui.username()
1362 return self._repo.ui.username()
1362
1363
1363 @propertycache
1364 @propertycache
1364 def _date(self):
1365 def _date(self):
1365 ui = self._repo.ui
1366 ui = self._repo.ui
1366 date = ui.configdate('devel', 'default-date')
1367 date = ui.configdate('devel', 'default-date')
1367 if date is None:
1368 if date is None:
1368 date = dateutil.makedate()
1369 date = dateutil.makedate()
1369 return date
1370 return date
1370
1371
1371 def subrev(self, subpath):
1372 def subrev(self, subpath):
1372 return None
1373 return None
1373
1374
1374 def manifestnode(self):
1375 def manifestnode(self):
1375 return None
1376 return None
1376 def user(self):
1377 def user(self):
1377 return self._user or self._repo.ui.username()
1378 return self._user or self._repo.ui.username()
1378 def date(self):
1379 def date(self):
1379 return self._date
1380 return self._date
1380 def description(self):
1381 def description(self):
1381 return self._text
1382 return self._text
1382 def files(self):
1383 def files(self):
1383 return sorted(self._status.modified + self._status.added +
1384 return sorted(self._status.modified + self._status.added +
1384 self._status.removed)
1385 self._status.removed)
1385
1386
1386 def modified(self):
1387 def modified(self):
1387 return self._status.modified
1388 return self._status.modified
1388 def added(self):
1389 def added(self):
1389 return self._status.added
1390 return self._status.added
1390 def removed(self):
1391 def removed(self):
1391 return self._status.removed
1392 return self._status.removed
1392 def deleted(self):
1393 def deleted(self):
1393 return self._status.deleted
1394 return self._status.deleted
1394 def branch(self):
1395 def branch(self):
1395 return encoding.tolocal(self._extra['branch'])
1396 return encoding.tolocal(self._extra['branch'])
1396 def closesbranch(self):
1397 def closesbranch(self):
1397 return 'close' in self._extra
1398 return 'close' in self._extra
1398 def extra(self):
1399 def extra(self):
1399 return self._extra
1400 return self._extra
1400
1401
1401 def isinmemory(self):
1402 def isinmemory(self):
1402 return False
1403 return False
1403
1404
1404 def tags(self):
1405 def tags(self):
1405 return []
1406 return []
1406
1407
1407 def bookmarks(self):
1408 def bookmarks(self):
1408 b = []
1409 b = []
1409 for p in self.parents():
1410 for p in self.parents():
1410 b.extend(p.bookmarks())
1411 b.extend(p.bookmarks())
1411 return b
1412 return b
1412
1413
1413 def phase(self):
1414 def phase(self):
1414 phase = phases.draft # default phase to draft
1415 phase = phases.draft # default phase to draft
1415 for p in self.parents():
1416 for p in self.parents():
1416 phase = max(phase, p.phase())
1417 phase = max(phase, p.phase())
1417 return phase
1418 return phase
1418
1419
1419 def hidden(self):
1420 def hidden(self):
1420 return False
1421 return False
1421
1422
1422 def children(self):
1423 def children(self):
1423 return []
1424 return []
1424
1425
1425 def flags(self, path):
1426 def flags(self, path):
1426 if r'_manifest' in self.__dict__:
1427 if r'_manifest' in self.__dict__:
1427 try:
1428 try:
1428 return self._manifest.flags(path)
1429 return self._manifest.flags(path)
1429 except KeyError:
1430 except KeyError:
1430 return ''
1431 return ''
1431
1432
1432 try:
1433 try:
1433 return self._flagfunc(path)
1434 return self._flagfunc(path)
1434 except OSError:
1435 except OSError:
1435 return ''
1436 return ''
1436
1437
1437 def ancestor(self, c2):
1438 def ancestor(self, c2):
1438 """return the "best" ancestor context of self and c2"""
1439 """return the "best" ancestor context of self and c2"""
1439 return self._parents[0].ancestor(c2) # punt on two parents for now
1440 return self._parents[0].ancestor(c2) # punt on two parents for now
1440
1441
1441 def walk(self, match):
1442 def walk(self, match):
1442 '''Generates matching file names.'''
1443 '''Generates matching file names.'''
1443 return sorted(self._repo.dirstate.walk(match,
1444 return sorted(self._repo.dirstate.walk(match,
1444 subrepos=sorted(self.substate),
1445 subrepos=sorted(self.substate),
1445 unknown=True, ignored=False))
1446 unknown=True, ignored=False))
1446
1447
1447 def matches(self, match):
1448 def matches(self, match):
1448 return sorted(self._repo.dirstate.matches(match))
1449 return sorted(self._repo.dirstate.matches(match))
1449
1450
1450 def ancestors(self):
1451 def ancestors(self):
1451 for p in self._parents:
1452 for p in self._parents:
1452 yield p
1453 yield p
1453 for a in self._repo.changelog.ancestors(
1454 for a in self._repo.changelog.ancestors(
1454 [p.rev() for p in self._parents]):
1455 [p.rev() for p in self._parents]):
1455 yield changectx(self._repo, a)
1456 yield changectx(self._repo, a)
1456
1457
1457 def markcommitted(self, node):
1458 def markcommitted(self, node):
1458 """Perform post-commit cleanup necessary after committing this ctx
1459 """Perform post-commit cleanup necessary after committing this ctx
1459
1460
1460 Specifically, this updates the backing stores that this working context
1461 Specifically, this updates the backing stores that this working context
1461 wraps to reflect that the changes represented by this workingctx have
1462 wraps to reflect that the changes represented by this workingctx have
1462 been committed. For example, it marks modified and added files as
1463 been committed. For example, it marks modified and added files as
1463 normal in the dirstate.
1464 normal in the dirstate.
1464
1465
1465 """
1466 """
1466
1467
1467 with self._repo.dirstate.parentchange():
1468 with self._repo.dirstate.parentchange():
1468 for f in self.modified() + self.added():
1469 for f in self.modified() + self.added():
1469 self._repo.dirstate.normal(f)
1470 self._repo.dirstate.normal(f)
1470 for f in self.removed():
1471 for f in self.removed():
1471 self._repo.dirstate.drop(f)
1472 self._repo.dirstate.drop(f)
1472 self._repo.dirstate.setparents(node)
1473 self._repo.dirstate.setparents(node)
1473
1474
1474 # write changes out explicitly, because nesting wlock at
1475 # write changes out explicitly, because nesting wlock at
1475 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1476 # runtime may prevent 'wlock.release()' in 'repo.commit()'
1476 # from immediately doing so for subsequent changing files
1477 # from immediately doing so for subsequent changing files
1477 self._repo.dirstate.write(self._repo.currenttransaction())
1478 self._repo.dirstate.write(self._repo.currenttransaction())
1478
1479
1479 def dirty(self, missing=False, merge=True, branch=True):
1480 def dirty(self, missing=False, merge=True, branch=True):
1480 return False
1481 return False
1481
1482
1482 class workingctx(committablectx):
1483 class workingctx(committablectx):
1483 """A workingctx object makes access to data related to
1484 """A workingctx object makes access to data related to
1484 the current working directory convenient.
1485 the current working directory convenient.
1485 date - any valid date string or (unixtime, offset), or None.
1486 date - any valid date string or (unixtime, offset), or None.
1486 user - username string, or None.
1487 user - username string, or None.
1487 extra - a dictionary of extra values, or None.
1488 extra - a dictionary of extra values, or None.
1488 changes - a list of file lists as returned by localrepo.status()
1489 changes - a list of file lists as returned by localrepo.status()
1489 or None to use the repository status.
1490 or None to use the repository status.
1490 """
1491 """
1491 def __init__(self, repo, text="", user=None, date=None, extra=None,
1492 def __init__(self, repo, text="", user=None, date=None, extra=None,
1492 changes=None):
1493 changes=None):
1493 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1494 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1494
1495
1495 def __iter__(self):
1496 def __iter__(self):
1496 d = self._repo.dirstate
1497 d = self._repo.dirstate
1497 for f in d:
1498 for f in d:
1498 if d[f] != 'r':
1499 if d[f] != 'r':
1499 yield f
1500 yield f
1500
1501
1501 def __contains__(self, key):
1502 def __contains__(self, key):
1502 return self._repo.dirstate[key] not in "?r"
1503 return self._repo.dirstate[key] not in "?r"
1503
1504
1504 def hex(self):
1505 def hex(self):
1505 return hex(wdirid)
1506 return hex(wdirid)
1506
1507
1507 @propertycache
1508 @propertycache
1508 def _parents(self):
1509 def _parents(self):
1509 p = self._repo.dirstate.parents()
1510 p = self._repo.dirstate.parents()
1510 if p[1] == nullid:
1511 if p[1] == nullid:
1511 p = p[:-1]
1512 p = p[:-1]
1512 return [changectx(self._repo, x) for x in p]
1513 return [changectx(self._repo, x) for x in p]
1513
1514
1514 def filectx(self, path, filelog=None):
1515 def filectx(self, path, filelog=None):
1515 """get a file context from the working directory"""
1516 """get a file context from the working directory"""
1516 return workingfilectx(self._repo, path, workingctx=self,
1517 return workingfilectx(self._repo, path, workingctx=self,
1517 filelog=filelog)
1518 filelog=filelog)
1518
1519
1519 def dirty(self, missing=False, merge=True, branch=True):
1520 def dirty(self, missing=False, merge=True, branch=True):
1520 "check whether a working directory is modified"
1521 "check whether a working directory is modified"
1521 # check subrepos first
1522 # check subrepos first
1522 for s in sorted(self.substate):
1523 for s in sorted(self.substate):
1523 if self.sub(s).dirty(missing=missing):
1524 if self.sub(s).dirty(missing=missing):
1524 return True
1525 return True
1525 # check current working dir
1526 # check current working dir
1526 return ((merge and self.p2()) or
1527 return ((merge and self.p2()) or
1527 (branch and self.branch() != self.p1().branch()) or
1528 (branch and self.branch() != self.p1().branch()) or
1528 self.modified() or self.added() or self.removed() or
1529 self.modified() or self.added() or self.removed() or
1529 (missing and self.deleted()))
1530 (missing and self.deleted()))
1530
1531
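# Usage sketch for dirty() on the working directory context
# (illustrative; the checkout at "." is an assumption):
#
#   from mercurial import hg, ui as uimod
#   repo = hg.repository(uimod.ui.load(), '.')
#   wctx = repo[None]             # workingctx for the working directory
#   if wctx.dirty(missing=True):  # also count files deleted on disk
#       print('uncommitted changes present')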
1531 def add(self, list, prefix=""):
1532 def add(self, list, prefix=""):
1532 with self._repo.wlock():
1533 with self._repo.wlock():
1533 ui, ds = self._repo.ui, self._repo.dirstate
1534 ui, ds = self._repo.ui, self._repo.dirstate
1534 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1535 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1535 rejected = []
1536 rejected = []
1536 lstat = self._repo.wvfs.lstat
1537 lstat = self._repo.wvfs.lstat
1537 for f in list:
1538 for f in list:
1538 # ds.pathto() returns an absolute file when this is invoked from
1539 # ds.pathto() returns an absolute file when this is invoked from
1539 # the keyword extension. That gets flagged as non-portable on
1540 # the keyword extension. That gets flagged as non-portable on
1540 # Windows, since it contains the drive letter and colon.
1541 # Windows, since it contains the drive letter and colon.
1541 scmutil.checkportable(ui, os.path.join(prefix, f))
1542 scmutil.checkportable(ui, os.path.join(prefix, f))
1542 try:
1543 try:
1543 st = lstat(f)
1544 st = lstat(f)
1544 except OSError:
1545 except OSError:
1545 ui.warn(_("%s does not exist!\n") % uipath(f))
1546 ui.warn(_("%s does not exist!\n") % uipath(f))
1546 rejected.append(f)
1547 rejected.append(f)
1547 continue
1548 continue
1548 if st.st_size > 10000000:
1549 if st.st_size > 10000000:
1549 ui.warn(_("%s: up to %d MB of RAM may be required "
1550 ui.warn(_("%s: up to %d MB of RAM may be required "
1550 "to manage this file\n"
1551 "to manage this file\n"
1551 "(use 'hg revert %s' to cancel the "
1552 "(use 'hg revert %s' to cancel the "
1552 "pending addition)\n")
1553 "pending addition)\n")
1553 % (f, 3 * st.st_size // 1000000, uipath(f)))
1554 % (f, 3 * st.st_size // 1000000, uipath(f)))
1554 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1555 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1555 ui.warn(_("%s not added: only files and symlinks "
1556 ui.warn(_("%s not added: only files and symlinks "
1556 "supported currently\n") % uipath(f))
1557 "supported currently\n") % uipath(f))
1557 rejected.append(f)
1558 rejected.append(f)
1558 elif ds[f] in 'amn':
1559 elif ds[f] in 'amn':
1559 ui.warn(_("%s already tracked!\n") % uipath(f))
1560 ui.warn(_("%s already tracked!\n") % uipath(f))
1560 elif ds[f] == 'r':
1561 elif ds[f] == 'r':
1561 ds.normallookup(f)
1562 ds.normallookup(f)
1562 else:
1563 else:
1563 ds.add(f)
1564 ds.add(f)
1564 return rejected
1565 return rejected
1565
1566
1566 def forget(self, files, prefix=""):
1567 def forget(self, files, prefix=""):
1567 with self._repo.wlock():
1568 with self._repo.wlock():
1568 ds = self._repo.dirstate
1569 ds = self._repo.dirstate
1569 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1570 uipath = lambda f: ds.pathto(pathutil.join(prefix, f))
1570 rejected = []
1571 rejected = []
1571 for f in files:
1572 for f in files:
1572 if f not in self._repo.dirstate:
1573 if f not in self._repo.dirstate:
1573 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1574 self._repo.ui.warn(_("%s not tracked!\n") % uipath(f))
1574 rejected.append(f)
1575 rejected.append(f)
1575 elif self._repo.dirstate[f] != 'a':
1576 elif self._repo.dirstate[f] != 'a':
1576 self._repo.dirstate.remove(f)
1577 self._repo.dirstate.remove(f)
1577 else:
1578 else:
1578 self._repo.dirstate.drop(f)
1579 self._repo.dirstate.drop(f)
1579 return rejected
1580 return rejected
1580
1581
1581 def undelete(self, list):
1582 def undelete(self, list):
1582 pctxs = self.parents()
1583 pctxs = self.parents()
1583 with self._repo.wlock():
1584 with self._repo.wlock():
1584 ds = self._repo.dirstate
1585 ds = self._repo.dirstate
1585 for f in list:
1586 for f in list:
1586 if self._repo.dirstate[f] != 'r':
1587 if self._repo.dirstate[f] != 'r':
1587 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1588 self._repo.ui.warn(_("%s not removed!\n") % ds.pathto(f))
1588 else:
1589 else:
1589 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1590 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1590 t = fctx.data()
1591 t = fctx.data()
1591 self._repo.wwrite(f, t, fctx.flags())
1592 self._repo.wwrite(f, t, fctx.flags())
1592 self._repo.dirstate.normal(f)
1593 self._repo.dirstate.normal(f)
1593
1594
1594 def copy(self, source, dest):
1595 def copy(self, source, dest):
1595 try:
1596 try:
1596 st = self._repo.wvfs.lstat(dest)
1597 st = self._repo.wvfs.lstat(dest)
1597 except OSError as err:
1598 except OSError as err:
1598 if err.errno != errno.ENOENT:
1599 if err.errno != errno.ENOENT:
1599 raise
1600 raise
1600 self._repo.ui.warn(_("%s does not exist!\n")
1601 self._repo.ui.warn(_("%s does not exist!\n")
1601 % self._repo.dirstate.pathto(dest))
1602 % self._repo.dirstate.pathto(dest))
1602 return
1603 return
1603 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1604 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1604 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1605 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1605 "symbolic link\n")
1606 "symbolic link\n")
1606 % self._repo.dirstate.pathto(dest))
1607 % self._repo.dirstate.pathto(dest))
1607 else:
1608 else:
1608 with self._repo.wlock():
1609 with self._repo.wlock():
1609 if self._repo.dirstate[dest] in '?':
1610 if self._repo.dirstate[dest] in '?':
1610 self._repo.dirstate.add(dest)
1611 self._repo.dirstate.add(dest)
1611 elif self._repo.dirstate[dest] in 'r':
1612 elif self._repo.dirstate[dest] in 'r':
1612 self._repo.dirstate.normallookup(dest)
1613 self._repo.dirstate.normallookup(dest)
1613 self._repo.dirstate.copy(source, dest)
1614 self._repo.dirstate.copy(source, dest)
1614
1615
1615 def match(self, pats=None, include=None, exclude=None, default='glob',
1616 def match(self, pats=None, include=None, exclude=None, default='glob',
1616 listsubrepos=False, badfn=None):
1617 listsubrepos=False, badfn=None):
1617 r = self._repo
1618 r = self._repo
1618
1619
1619 # Only a case insensitive filesystem needs magic to translate user input
1620 # Only a case insensitive filesystem needs magic to translate user input
1620 # to actual case in the filesystem.
1621 # to actual case in the filesystem.
1621 icasefs = not util.fscasesensitive(r.root)
1622 icasefs = not util.fscasesensitive(r.root)
1622 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1623 return matchmod.match(r.root, r.getcwd(), pats, include, exclude,
1623 default, auditor=r.auditor, ctx=self,
1624 default, auditor=r.auditor, ctx=self,
1624 listsubrepos=listsubrepos, badfn=badfn,
1625 listsubrepos=listsubrepos, badfn=badfn,
1625 icasefs=icasefs)
1626 icasefs=icasefs)
1626
1627
1627 def _filtersuspectsymlink(self, files):
1628 def _filtersuspectsymlink(self, files):
1628 if not files or self._repo.dirstate._checklink:
1629 if not files or self._repo.dirstate._checklink:
1629 return files
1630 return files
1630
1631
1631 # Symlink placeholders may get non-symlink-like contents
1632 # Symlink placeholders may get non-symlink-like contents
1632 # via user error or dereferencing by NFS or Samba servers,
1633 # via user error or dereferencing by NFS or Samba servers,
1633 # so we filter out any placeholders that don't look like a
1634 # so we filter out any placeholders that don't look like a
1634 # symlink
1635 # symlink
1635 sane = []
1636 sane = []
1636 for f in files:
1637 for f in files:
1637 if self.flags(f) == 'l':
1638 if self.flags(f) == 'l':
1638 d = self[f].data()
1639 d = self[f].data()
1639 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1640 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1640 self._repo.ui.debug('ignoring suspect symlink placeholder'
1641 self._repo.ui.debug('ignoring suspect symlink placeholder'
1641 ' "%s"\n' % f)
1642 ' "%s"\n' % f)
1642 continue
1643 continue
1643 sane.append(f)
1644 sane.append(f)
1644 return sane
1645 return sane
1645
1646
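# The placeholder test above rejects content that cannot plausibly be a
# symlink target: empty, 1024 bytes or longer, containing a newline, or
# binary. The same predicate in isolation (sample values illustrative):
#
#   from mercurial import util
#   def looksbogus(d):
#       return d == '' or len(d) >= 1024 or '\n' in d or util.binary(d)
#
#   assert not looksbogus('target/file')
#   assert looksbogus('line one\nline two\n')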
1646 def _checklookup(self, files):
1647 def _checklookup(self, files):
1647 # check for any possibly clean files
1648 # check for any possibly clean files
1648 if not files:
1649 if not files:
1649 return [], [], []
1650 return [], [], []
1650
1651
1651 modified = []
1652 modified = []
1652 deleted = []
1653 deleted = []
1653 fixup = []
1654 fixup = []
1654 pctx = self._parents[0]
1655 pctx = self._parents[0]
1655 # do a full compare of any files that might have changed
1656 # do a full compare of any files that might have changed
1656 for f in sorted(files):
1657 for f in sorted(files):
1657 try:
1658 try:
1658 # This will return True for a file that got replaced by a
1659 # This will return True for a file that got replaced by a
1659 # directory in the interim, but fixing that is pretty hard.
1660 # directory in the interim, but fixing that is pretty hard.
1660 if (f not in pctx or self.flags(f) != pctx.flags(f)
1661 if (f not in pctx or self.flags(f) != pctx.flags(f)
1661 or pctx[f].cmp(self[f])):
1662 or pctx[f].cmp(self[f])):
1662 modified.append(f)
1663 modified.append(f)
1663 else:
1664 else:
1664 fixup.append(f)
1665 fixup.append(f)
1665 except (IOError, OSError):
1666 except (IOError, OSError):
1666 # A file became inaccessible in between? Mark it as deleted,
1667 # A file became inaccessible in between? Mark it as deleted,
1667 # matching dirstate behavior (issue5584).
1668 # matching dirstate behavior (issue5584).
1668 # The dirstate has more complex behavior around whether a
1669 # The dirstate has more complex behavior around whether a
1669 # missing file matches a directory, etc, but we don't need to
1670 # missing file matches a directory, etc, but we don't need to
1670 # bother with that: if f has made it to this point, we're sure
1671 # bother with that: if f has made it to this point, we're sure
1671 # it's in the dirstate.
1672 # it's in the dirstate.
1672 deleted.append(f)
1673 deleted.append(f)
1673
1674
1674 return modified, deleted, fixup
1675 return modified, deleted, fixup
1675
1676
1676 def _poststatusfixup(self, status, fixup):
1677 def _poststatusfixup(self, status, fixup):
1677 """update dirstate for files that are actually clean"""
1678 """update dirstate for files that are actually clean"""
1678 poststatus = self._repo.postdsstatus()
1679 poststatus = self._repo.postdsstatus()
1679 if fixup or poststatus:
1680 if fixup or poststatus:
1680 try:
1681 try:
1681 oldid = self._repo.dirstate.identity()
1682 oldid = self._repo.dirstate.identity()
1682
1683
1683 # updating the dirstate is optional
1684 # updating the dirstate is optional
1684 # so we don't wait on the lock
1685 # so we don't wait on the lock
1685 # wlock can invalidate the dirstate, so cache normal _after_
1686 # wlock can invalidate the dirstate, so cache normal _after_
1686 # taking the lock
1687 # taking the lock
1687 with self._repo.wlock(False):
1688 with self._repo.wlock(False):
1688 if self._repo.dirstate.identity() == oldid:
1689 if self._repo.dirstate.identity() == oldid:
1689 if fixup:
1690 if fixup:
1690 normal = self._repo.dirstate.normal
1691 normal = self._repo.dirstate.normal
1691 for f in fixup:
1692 for f in fixup:
1692 normal(f)
1693 normal(f)
1693 # write changes out explicitly, because nesting
1694 # write changes out explicitly, because nesting
1694 # wlock at runtime may prevent 'wlock.release()'
1695 # wlock at runtime may prevent 'wlock.release()'
1695 # after this block from doing so for subsequent
1696 # after this block from doing so for subsequent
1696 # changing files
1697 # changing files
1697 tr = self._repo.currenttransaction()
1698 tr = self._repo.currenttransaction()
1698 self._repo.dirstate.write(tr)
1699 self._repo.dirstate.write(tr)
1699
1700
1700 if poststatus:
1701 if poststatus:
1701 for ps in poststatus:
1702 for ps in poststatus:
1702 ps(self, status)
1703 ps(self, status)
1703 else:
1704 else:
1704 # in this case, writing changes out breaks
1705 # in this case, writing changes out breaks
1705 # consistency, because .hg/dirstate was
1706 # consistency, because .hg/dirstate was
1706 # already changed simultaneously after last
1707 # already changed simultaneously after last
1707 # caching (see also issue5584 for detail)
1708 # caching (see also issue5584 for detail)
1708 self._repo.ui.debug('skip updating dirstate: '
1709 self._repo.ui.debug('skip updating dirstate: '
1709 'identity mismatch\n')
1710 'identity mismatch\n')
1710 except error.LockError:
1711 except error.LockError:
1711 pass
1712 pass
1712 finally:
1713 finally:
1713 # Even if the wlock couldn't be grabbed, clear out the list.
1714 # Even if the wlock couldn't be grabbed, clear out the list.
1714 self._repo.clearpostdsstatus()
1715 self._repo.clearpostdsstatus()
1715
1716
1716 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1717 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
1717 '''Gets the status from the dirstate -- internal use only.'''
1718 '''Gets the status from the dirstate -- internal use only.'''
1718 subrepos = []
1719 subrepos = []
1719 if '.hgsub' in self:
1720 if '.hgsub' in self:
1720 subrepos = sorted(self.substate)
1721 subrepos = sorted(self.substate)
1721 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1722 cmp, s = self._repo.dirstate.status(match, subrepos, ignored=ignored,
1722 clean=clean, unknown=unknown)
1723 clean=clean, unknown=unknown)
1723
1724
1724 # check for any possibly clean files
1725 # check for any possibly clean files
1725 fixup = []
1726 fixup = []
1726 if cmp:
1727 if cmp:
1727 modified2, deleted2, fixup = self._checklookup(cmp)
1728 modified2, deleted2, fixup = self._checklookup(cmp)
1728 s.modified.extend(modified2)
1729 s.modified.extend(modified2)
1729 s.deleted.extend(deleted2)
1730 s.deleted.extend(deleted2)
1730
1731
1731 if fixup and clean:
1732 if fixup and clean:
1732 s.clean.extend(fixup)
1733 s.clean.extend(fixup)
1733
1734
1734 self._poststatusfixup(s, fixup)
1735 self._poststatusfixup(s, fixup)
1735
1736
1736 if match.always():
1737 if match.always():
1737 # cache for performance
1738 # cache for performance
1738 if s.unknown or s.ignored or s.clean:
1739 if s.unknown or s.ignored or s.clean:
1739 # "_status" is cached with list*=False in the normal route
1740 # "_status" is cached with list*=False in the normal route
1740 self._status = scmutil.status(s.modified, s.added, s.removed,
1741 self._status = scmutil.status(s.modified, s.added, s.removed,
1741 s.deleted, [], [], [])
1742 s.deleted, [], [], [])
1742 else:
1743 else:
1743 self._status = s
1744 self._status = s
1744
1745
1745 return s
1746 return s
1746
1747
1747 @propertycache
1748 @propertycache
1748 def _manifest(self):
1749 def _manifest(self):
1749 """generate a manifest corresponding to the values in self._status
1750 """generate a manifest corresponding to the values in self._status
1750
1751
1751 This reuses the file nodeids from the parent, but uses special node
1752 This reuses the file nodeids from the parent, but uses special node
1752 identifiers for added and modified files. This is used by the manifest
1753 identifiers for added and modified files. This is used by the manifest
1753 merge to see that files are different and by the update logic to avoid
1754 merge to see that files are different and by the update logic to avoid
1754 deleting newly added files.
1755 deleting newly added files.
1755 """
1756 """
1756 return self._buildstatusmanifest(self._status)
1757 return self._buildstatusmanifest(self._status)
1757
1758
1758 def _buildstatusmanifest(self, status):
1759 def _buildstatusmanifest(self, status):
1759 """Builds a manifest that includes the given status results."""
1760 """Builds a manifest that includes the given status results."""
1760 parents = self.parents()
1761 parents = self.parents()
1761
1762
1762 man = parents[0].manifest().copy()
1763 man = parents[0].manifest().copy()
1763
1764
1764 ff = self._flagfunc
1765 ff = self._flagfunc
1765 for i, l in ((addednodeid, status.added),
1766 for i, l in ((addednodeid, status.added),
1766 (modifiednodeid, status.modified)):
1767 (modifiednodeid, status.modified)):
1767 for f in l:
1768 for f in l:
1768 man[f] = i
1769 man[f] = i
1769 try:
1770 try:
1770 man.setflag(f, ff(f))
1771 man.setflag(f, ff(f))
1771 except OSError:
1772 except OSError:
1772 pass
1773 pass
1773
1774
1774 for f in status.deleted + status.removed:
1775 for f in status.deleted + status.removed:
1775 if f in man:
1776 if f in man:
1776 del man[f]
1777 del man[f]
1777
1778
1778 return man
1779 return man
1779
1780
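# Sketch of the sentinel nodeids installed above (illustrative; assumes
# a checkout at "." where a tracked file named "hacked.txt" currently
# has uncommitted modifications -- the path is an assumption):
#
#   from mercurial import hg, ui as uimod
#   from mercurial.node import modifiednodeid
#   repo = hg.repository(uimod.ui.load(), '.')
#   wman = repo[None].manifest()   # built by _buildstatusmanifest above
#   assert wman['hacked.txt'] == modifiednodeid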
1780 def _buildstatus(self, other, s, match, listignored, listclean,
1781 def _buildstatus(self, other, s, match, listignored, listclean,
1781 listunknown):
1782 listunknown):
1782 """build a status with respect to another context
1783 """build a status with respect to another context
1783
1784
1784 This includes logic for maintaining the fast path of status when
1785 This includes logic for maintaining the fast path of status when
1785 comparing the working directory against its parent: a new manifest is
1786 comparing the working directory against its parent: a new manifest is
1786 only built when self (the working directory) is compared against
1787 only built when self (the working directory) is compared against
1787 something other than its parent (repo['.']).
1788 something other than its parent (repo['.']).
1788 """
1789 """
1789 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1790 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1790 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1791 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1791 # might have accidentally ended up with the entire contents of the file
1792 # might have accidentally ended up with the entire contents of the file
1792 # they are supposed to be linking to.
1793 # they are supposed to be linking to.
1793 s.modified[:] = self._filtersuspectsymlink(s.modified)
1794 s.modified[:] = self._filtersuspectsymlink(s.modified)
1794 if other != self._repo['.']:
1795 if other != self._repo['.']:
1795 s = super(workingctx, self)._buildstatus(other, s, match,
1796 s = super(workingctx, self)._buildstatus(other, s, match,
1796 listignored, listclean,
1797 listignored, listclean,
1797 listunknown)
1798 listunknown)
1798 return s
1799 return s
1799
1800
1800 def _matchstatus(self, other, match):
1801 def _matchstatus(self, other, match):
1801 """override the match method with a filter for directory patterns
1802 """override the match method with a filter for directory patterns
1802
1803
1803 We use inheritance to customize the match.bad method only in the
1804 We use inheritance to customize the match.bad method only in the
1804 workingctx case, since it applies only to the working directory when
1805 workingctx case, since it applies only to the working directory when
1805 comparing against the parent changeset.
1806 comparing against the parent changeset.
1806
1807
1807 If we aren't comparing against the working directory's parent, then we
1808 If we aren't comparing against the working directory's parent, then we
1808 just use the default match object sent to us.
1809 just use the default match object sent to us.
1809 """
1810 """
1810 if other != self._repo['.']:
1811 if other != self._repo['.']:
1811 def bad(f, msg):
1812 def bad(f, msg):
1812 # 'f' may be a directory pattern from 'match.files()',
1813 # 'f' may be a directory pattern from 'match.files()',
1813 # so 'f not in ctx1' is not enough
1814 # so 'f not in ctx1' is not enough
1814 if f not in other and not other.hasdir(f):
1815 if f not in other and not other.hasdir(f):
1815 self._repo.ui.warn('%s: %s\n' %
1816 self._repo.ui.warn('%s: %s\n' %
1816 (self._repo.dirstate.pathto(f), msg))
1817 (self._repo.dirstate.pathto(f), msg))
1817 match.bad = bad
1818 match.bad = bad
1818 return match
1819 return match
1819
1820
1820 def markcommitted(self, node):
1821 def markcommitted(self, node):
1821 super(workingctx, self).markcommitted(node)
1822 super(workingctx, self).markcommitted(node)
1822
1823
1823 sparse.aftercommit(self._repo, node)
1824 sparse.aftercommit(self._repo, node)
1824
1825
1825 class committablefilectx(basefilectx):
1826 class committablefilectx(basefilectx):
1826 """A committablefilectx provides common functionality for a file context
1827 """A committablefilectx provides common functionality for a file context
1827 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1828 that wants the ability to commit, e.g. workingfilectx or memfilectx."""
1828 def __init__(self, repo, path, filelog=None, ctx=None):
1829 def __init__(self, repo, path, filelog=None, ctx=None):
1829 self._repo = repo
1830 self._repo = repo
1830 self._path = path
1831 self._path = path
1831 self._changeid = None
1832 self._changeid = None
1832 self._filerev = self._filenode = None
1833 self._filerev = self._filenode = None
1833
1834
1834 if filelog is not None:
1835 if filelog is not None:
1835 self._filelog = filelog
1836 self._filelog = filelog
1836 if ctx:
1837 if ctx:
1837 self._changectx = ctx
1838 self._changectx = ctx
1838
1839
1839 def __nonzero__(self):
1840 def __nonzero__(self):
1840 return True
1841 return True
1841
1842
1842 __bool__ = __nonzero__
1843 __bool__ = __nonzero__
1843
1844
1844 def linkrev(self):
1845 def linkrev(self):
1845 # linked to self._changectx no matter if file is modified or not
1846 # linked to self._changectx no matter if file is modified or not
1846 return self.rev()
1847 return self.rev()
1847
1848
1848 def parents(self):
1849 def parents(self):
1849 '''return parent filectxs, following copies if necessary'''
1850 '''return parent filectxs, following copies if necessary'''
1850 def filenode(ctx, path):
1851 def filenode(ctx, path):
1851 return ctx._manifest.get(path, nullid)
1852 return ctx._manifest.get(path, nullid)
1852
1853
1853 path = self._path
1854 path = self._path
1854 fl = self._filelog
1855 fl = self._filelog
1855 pcl = self._changectx._parents
1856 pcl = self._changectx._parents
1856 renamed = self.renamed()
1857 renamed = self.renamed()
1857
1858
1858 if renamed:
1859 if renamed:
1859 pl = [renamed + (None,)]
1860 pl = [renamed + (None,)]
1860 else:
1861 else:
1861 pl = [(path, filenode(pcl[0], path), fl)]
1862 pl = [(path, filenode(pcl[0], path), fl)]
1862
1863
1863 for pc in pcl[1:]:
1864 for pc in pcl[1:]:
1864 pl.append((path, filenode(pc, path), fl))
1865 pl.append((path, filenode(pc, path), fl))
1865
1866
1866 return [self._parentfilectx(p, fileid=n, filelog=l)
1867 return [self._parentfilectx(p, fileid=n, filelog=l)
1867 for p, n, l in pl if n != nullid]
1868 for p, n, l in pl if n != nullid]
1868
1869
1869 def children(self):
1870 def children(self):
1870 return []
1871 return []
1871
1872
1872 class workingfilectx(committablefilectx):
1873 class workingfilectx(committablefilectx):
1873 """A workingfilectx object makes access to data related to a particular
1874 """A workingfilectx object makes access to data related to a particular
1874 file in the working directory convenient."""
1875 file in the working directory convenient."""
1875 def __init__(self, repo, path, filelog=None, workingctx=None):
1876 def __init__(self, repo, path, filelog=None, workingctx=None):
1876 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1877 super(workingfilectx, self).__init__(repo, path, filelog, workingctx)
1877
1878
1878 @propertycache
1879 @propertycache
1879 def _changectx(self):
1880 def _changectx(self):
1880 return workingctx(self._repo)
1881 return workingctx(self._repo)
1881
1882
1882 def data(self):
1883 def data(self):
1883 return self._repo.wread(self._path)
1884 return self._repo.wread(self._path)
1884 def renamed(self):
1885 def renamed(self):
1885 rp = self._repo.dirstate.copied(self._path)
1886 rp = self._repo.dirstate.copied(self._path)
1886 if not rp:
1887 if not rp:
1887 return None
1888 return None
1888 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1889 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
1889
1890
1890 def size(self):
1891 def size(self):
1891 return self._repo.wvfs.lstat(self._path).st_size
1892 return self._repo.wvfs.lstat(self._path).st_size
1892 def date(self):
1893 def date(self):
1893 t, tz = self._changectx.date()
1894 t, tz = self._changectx.date()
1894 try:
1895 try:
1895 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1896 return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
1896 except OSError as err:
1897 except OSError as err:
1897 if err.errno != errno.ENOENT:
1898 if err.errno != errno.ENOENT:
1898 raise
1899 raise
1899 return (t, tz)
1900 return (t, tz)
1900
1901
1901 def exists(self):
1902 def exists(self):
1902 return self._repo.wvfs.exists(self._path)
1903 return self._repo.wvfs.exists(self._path)
1903
1904
1904 def lexists(self):
1905 def lexists(self):
1905 return self._repo.wvfs.lexists(self._path)
1906 return self._repo.wvfs.lexists(self._path)
1906
1907
1907 def audit(self):
1908 def audit(self):
1908 return self._repo.wvfs.audit(self._path)
1909 return self._repo.wvfs.audit(self._path)
1909
1910
1910 def cmp(self, fctx):
1911 def cmp(self, fctx):
1911 """compare with other file context
1912 """compare with other file context
1912
1913
1913 returns True if different than fctx.
1914 returns True if different than fctx.
1914 """
1915 """
1915 # fctx should be a filectx (not a workingfilectx)
1916 # fctx should be a filectx (not a workingfilectx)
1916 # invert comparison to reuse the same code path
1917 # invert comparison to reuse the same code path
1917 return fctx.cmp(self)
1918 return fctx.cmp(self)
1918
1919
1919 def remove(self, ignoremissing=False):
1920 def remove(self, ignoremissing=False):
1920 """wraps unlink for a repo's working directory"""
1921 """wraps unlink for a repo's working directory"""
1921 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1922 self._repo.wvfs.unlinkpath(self._path, ignoremissing=ignoremissing)
1922
1923
1923 def write(self, data, flags, backgroundclose=False, **kwargs):
1924 def write(self, data, flags, backgroundclose=False, **kwargs):
1924 """wraps repo.wwrite"""
1925 """wraps repo.wwrite"""
1925 self._repo.wwrite(self._path, data, flags,
1926 self._repo.wwrite(self._path, data, flags,
1926 backgroundclose=backgroundclose,
1927 backgroundclose=backgroundclose,
1927 **kwargs)
1928 **kwargs)
1928
1929
1929 def markcopied(self, src):
1930 def markcopied(self, src):
1930 """marks this file a copy of `src`"""
1931 """marks this file a copy of `src`"""
1931 if self._repo.dirstate[self._path] in "nma":
1932 if self._repo.dirstate[self._path] in "nma":
1932 self._repo.dirstate.copy(src, self._path)
1933 self._repo.dirstate.copy(src, self._path)
1933
1934
1934 def clearunknown(self):
1935 def clearunknown(self):
1935 """Removes conflicting items in the working directory so that
1936 """Removes conflicting items in the working directory so that
1936 ``write()`` can be called successfully.
1937 ``write()`` can be called successfully.
1937 """
1938 """
1938 wvfs = self._repo.wvfs
1939 wvfs = self._repo.wvfs
1939 f = self._path
1940 f = self._path
1940 wvfs.audit(f)
1941 wvfs.audit(f)
1941 if wvfs.isdir(f) and not wvfs.islink(f):
1942 if wvfs.isdir(f) and not wvfs.islink(f):
1942 wvfs.rmtree(f, forcibly=True)
1943 wvfs.rmtree(f, forcibly=True)
1943 for p in reversed(list(util.finddirs(f))):
1944 for p in reversed(list(util.finddirs(f))):
1944 if wvfs.isfileorlink(p):
1945 if wvfs.isfileorlink(p):
1945 wvfs.unlink(p)
1946 wvfs.unlink(p)
1946 break
1947 break
1947
1948
1948 def setflags(self, l, x):
1949 def setflags(self, l, x):
1949 self._repo.wvfs.setflags(self._path, l, x)
1950 self._repo.wvfs.setflags(self._path, l, x)
1950
1951
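# --- Editorial sketch (not part of the original module) ----------------------
# A minimal illustration of how calling code typically reaches a
# workingfilectx: index the working context.  ``repo`` is assumed to be an
# existing localrepository with a tracked file 'README'; the ``repo[None]`` /
# ``wctx[path]`` access pattern is an assumption drawn from the wider context
# API, not from the code shown on this page.
def _example_working_file(repo):
    wctx = repo[None]          # context object for the working directory
    fctx = wctx['README']      # workingfilectx for one tracked file
    # data() reads from disk via repo.wread(); size()/exists() go through wvfs
    return fctx.data(), fctx.size(), fctx.exists()
# ------------------------------------------------------------------------------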
1951 class overlayworkingctx(committablectx):
1952 class overlayworkingctx(committablectx):
1952 """Wraps another mutable context with a write-back cache that can be
1953 """Wraps another mutable context with a write-back cache that can be
1953 converted into a commit context.
1954 converted into a commit context.
1954
1955
1955 self._cache[path] maps to a dict with keys: {
1956 self._cache[path] maps to a dict with keys: {
1956 'exists': bool?
1957 'exists': bool?
1957 'date': date?
1958 'date': date?
1958 'data': str?
1959 'data': str?
1959 'flags': str?
1960 'flags': str?
1960 'copied': str? (path or None)
1961 'copied': str? (path or None)
1961 }
1962 }
1962 If `exists` is True, `flags` must be non-None and `date` is non-None. If it
1963 If `exists` is True, `flags` must be non-None and `date` is non-None. If it
1963 is `False`, the file was deleted.
1964 is `False`, the file was deleted.
1964 """
1965 """
1965
1966
1966 def __init__(self, repo):
1967 def __init__(self, repo):
1967 super(overlayworkingctx, self).__init__(repo)
1968 super(overlayworkingctx, self).__init__(repo)
1968 self._repo = repo
1969 self._repo = repo
1969 self.clean()
1970 self.clean()
1970
1971
1971 def setbase(self, wrappedctx):
1972 def setbase(self, wrappedctx):
1972 self._wrappedctx = wrappedctx
1973 self._wrappedctx = wrappedctx
1973 self._parents = [wrappedctx]
1974 self._parents = [wrappedctx]
1974 # Drop old manifest cache as it is now out of date.
1975 # Drop old manifest cache as it is now out of date.
1975 # This is necessary when, e.g., rebasing several nodes with one
1976 # This is necessary when, e.g., rebasing several nodes with one
1976 # ``overlayworkingctx`` (e.g. with --collapse).
1977 # ``overlayworkingctx`` (e.g. with --collapse).
1977 util.clearcachedproperty(self, '_manifest')
1978 util.clearcachedproperty(self, '_manifest')
1978
1979
1979 def data(self, path):
1980 def data(self, path):
1980 if self.isdirty(path):
1981 if self.isdirty(path):
1981 if self._cache[path]['exists']:
1982 if self._cache[path]['exists']:
1982 if self._cache[path]['data']:
1983 if self._cache[path]['data']:
1983 return self._cache[path]['data']
1984 return self._cache[path]['data']
1984 else:
1985 else:
1985 # Must fallback here, too, because we only set flags.
1986 # Must fallback here, too, because we only set flags.
1986 return self._wrappedctx[path].data()
1987 return self._wrappedctx[path].data()
1987 else:
1988 else:
1988 raise error.ProgrammingError("No such file or directory: %s" %
1989 raise error.ProgrammingError("No such file or directory: %s" %
1989 path)
1990 path)
1990 else:
1991 else:
1991 return self._wrappedctx[path].data()
1992 return self._wrappedctx[path].data()
1992
1993
1993 @propertycache
1994 @propertycache
1994 def _manifest(self):
1995 def _manifest(self):
1995 parents = self.parents()
1996 parents = self.parents()
1996 man = parents[0].manifest().copy()
1997 man = parents[0].manifest().copy()
1997
1998
1998 flag = self._flagfunc
1999 flag = self._flagfunc
1999 for path in self.added():
2000 for path in self.added():
2000 man[path] = addednodeid
2001 man[path] = addednodeid
2001 man.setflag(path, flag(path))
2002 man.setflag(path, flag(path))
2002 for path in self.modified():
2003 for path in self.modified():
2003 man[path] = modifiednodeid
2004 man[path] = modifiednodeid
2004 man.setflag(path, flag(path))
2005 man.setflag(path, flag(path))
2005 for path in self.removed():
2006 for path in self.removed():
2006 del man[path]
2007 del man[path]
2007 return man
2008 return man
2008
2009
2009 @propertycache
2010 @propertycache
2010 def _flagfunc(self):
2011 def _flagfunc(self):
2011 def f(path):
2012 def f(path):
2012 return self._cache[path]['flags']
2013 return self._cache[path]['flags']
2013 return f
2014 return f
2014
2015
2015 def files(self):
2016 def files(self):
2016 return sorted(self.added() + self.modified() + self.removed())
2017 return sorted(self.added() + self.modified() + self.removed())
2017
2018
2018 def modified(self):
2019 def modified(self):
2019 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2020 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2020 self._existsinparent(f)]
2021 self._existsinparent(f)]
2021
2022
2022 def added(self):
2023 def added(self):
2023 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2024 return [f for f in self._cache.keys() if self._cache[f]['exists'] and
2024 not self._existsinparent(f)]
2025 not self._existsinparent(f)]
2025
2026
2026 def removed(self):
2027 def removed(self):
2027 return [f for f in self._cache.keys() if
2028 return [f for f in self._cache.keys() if
2028 not self._cache[f]['exists'] and self._existsinparent(f)]
2029 not self._cache[f]['exists'] and self._existsinparent(f)]
2029
2030
2030 def isinmemory(self):
2031 def isinmemory(self):
2031 return True
2032 return True
2032
2033
2033 def filedate(self, path):
2034 def filedate(self, path):
2034 if self.isdirty(path):
2035 if self.isdirty(path):
2035 return self._cache[path]['date']
2036 return self._cache[path]['date']
2036 else:
2037 else:
2037 return self._wrappedctx[path].date()
2038 return self._wrappedctx[path].date()
2038
2039
2039 def markcopied(self, path, origin):
2040 def markcopied(self, path, origin):
2040 if self.isdirty(path):
2041 if self.isdirty(path):
2041 self._cache[path]['copied'] = origin
2042 self._cache[path]['copied'] = origin
2042 else:
2043 else:
2043 raise error.ProgrammingError('markcopied() called on clean context')
2044 raise error.ProgrammingError('markcopied() called on clean context')
2044
2045
2045 def copydata(self, path):
2046 def copydata(self, path):
2046 if self.isdirty(path):
2047 if self.isdirty(path):
2047 return self._cache[path]['copied']
2048 return self._cache[path]['copied']
2048 else:
2049 else:
2049 raise error.ProgrammingError('copydata() called on clean context')
2050 raise error.ProgrammingError('copydata() called on clean context')
2050
2051
2051 def flags(self, path):
2052 def flags(self, path):
2052 if self.isdirty(path):
2053 if self.isdirty(path):
2053 if self._cache[path]['exists']:
2054 if self._cache[path]['exists']:
2054 return self._cache[path]['flags']
2055 return self._cache[path]['flags']
2055 else:
2056 else:
2056 raise error.ProgrammingError("No such file or directory: %s" %
2057 raise error.ProgrammingError("No such file or directory: %s" %
2057 path)
2058 path)
2058 else:
2059 else:
2059 return self._wrappedctx[path].flags()
2060 return self._wrappedctx[path].flags()
2060
2061
2061 def _existsinparent(self, path):
2062 def _existsinparent(self, path):
2062 try:
2063 try:
2063 # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
2064 # ``commitctx`` raises a ``ManifestLookupError`` if a path does not
2064 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
2065 # exist, unlike ``workingctx``, which returns a ``workingfilectx``
2065 # with an ``exists()`` function.
2066 # with an ``exists()`` function.
2066 self._wrappedctx[path]
2067 self._wrappedctx[path]
2067 return True
2068 return True
2068 except error.ManifestLookupError:
2069 except error.ManifestLookupError:
2069 return False
2070 return False
2070
2071
2071 def _auditconflicts(self, path):
2072 def _auditconflicts(self, path):
2072 """Replicates conflict checks done by wvfs.write().
2073 """Replicates conflict checks done by wvfs.write().
2073
2074
2074 Since we never write to the filesystem and never call `applyupdates` in
2075 Since we never write to the filesystem and never call `applyupdates` in
2075 IMM, we'll never check that a path is actually writable -- e.g., because
2076 IMM, we'll never check that a path is actually writable -- e.g., because
2076 it adds `a/foo`, but `a` is actually a file in the other commit.
2077 it adds `a/foo`, but `a` is actually a file in the other commit.
2077 """
2078 """
2078 def fail(path, component):
2079 def fail(path, component):
2079 # p1() is the base and we're receiving "writes" for p2()'s
2080 # p1() is the base and we're receiving "writes" for p2()'s
2080 # files.
2081 # files.
2081 if 'l' in self.p1()[component].flags():
2082 if 'l' in self.p1()[component].flags():
2082 raise error.Abort("error: %s conflicts with symlink %s "
2083 raise error.Abort("error: %s conflicts with symlink %s "
2083 "in %s." % (path, component,
2084 "in %s." % (path, component,
2084 self.p1().rev()))
2085 self.p1().rev()))
2085 else:
2086 else:
2086 raise error.Abort("error: '%s' conflicts with file '%s' in "
2087 raise error.Abort("error: '%s' conflicts with file '%s' in "
2087 "%s." % (path, component,
2088 "%s." % (path, component,
2088 self.p1().rev()))
2089 self.p1().rev()))
2089
2090
2090 # Test that each new directory to be created to write this path from p2
2091 # Test that each new directory to be created to write this path from p2
2091 # is not a file in p1.
2092 # is not a file in p1.
2092 components = path.split('/')
2093 components = path.split('/')
2093 for i in xrange(len(components)):
2094 for i in xrange(len(components)):
2094 component = "/".join(components[0:i])
2095 component = "/".join(components[0:i])
2095 if component in self.p1():
2096 if component in self.p1():
2096 fail(path, component)
2097 fail(path, component)
2097
2098
2098 # Test the other direction -- that this path from p2 isn't a directory
2099 # Test the other direction -- that this path from p2 isn't a directory
2099 # in p1 (test that p1 doesn't have any paths matching `path/*`).
2100 # in p1 (test that p1 doesn't have any paths matching `path/*`).
2100 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
2101 match = matchmod.match('/', '', [path + '/'], default=b'relpath')
2101 matches = self.p1().manifest().matches(match)
2102 matches = self.p1().manifest().matches(match)
2102 if len(matches) > 0:
2103 if len(matches) > 0:
2103 if len(matches) == 1 and matches.keys()[0] == path:
2104 if len(matches) == 1 and matches.keys()[0] == path:
2104 return
2105 return
2105 raise error.Abort("error: file '%s' cannot be written because "
2106 raise error.Abort("error: file '%s' cannot be written because "
2106 " '%s/' is a folder in %s (containing %d "
2107 " '%s/' is a folder in %s (containing %d "
2107 "entries: %s)"
2108 "entries: %s)"
2108 % (path, path, self.p1(), len(matches),
2109 % (path, path, self.p1(), len(matches),
2109 ', '.join(matches.keys())))
2110 ', '.join(matches.keys())))
2110
2111
2111 def write(self, path, data, flags='', **kwargs):
2112 def write(self, path, data, flags='', **kwargs):
2112 if data is None:
2113 if data is None:
2113 raise error.ProgrammingError("data must be non-None")
2114 raise error.ProgrammingError("data must be non-None")
2114 self._auditconflicts(path)
2115 self._auditconflicts(path)
2115 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
2116 self._markdirty(path, exists=True, data=data, date=dateutil.makedate(),
2116 flags=flags)
2117 flags=flags)
2117
2118
2118 def setflags(self, path, l, x):
2119 def setflags(self, path, l, x):
2119 self._markdirty(path, exists=True, date=dateutil.makedate(),
2120 self._markdirty(path, exists=True, date=dateutil.makedate(),
2120 flags=(l and 'l' or '') + (x and 'x' or ''))
2121 flags=(l and 'l' or '') + (x and 'x' or ''))
2121
2122
2122 def remove(self, path):
2123 def remove(self, path):
2123 self._markdirty(path, exists=False)
2124 self._markdirty(path, exists=False)
2124
2125
2125 def exists(self, path):
2126 def exists(self, path):
2126 """exists behaves like `lexists`, but needs to follow symlinks and
2127 """exists behaves like `lexists`, but needs to follow symlinks and
2127 return False if they are broken.
2128 return False if they are broken.
2128 """
2129 """
2129 if self.isdirty(path):
2130 if self.isdirty(path):
2130 # If this path exists and is a symlink, "follow" it by calling
2131 # If this path exists and is a symlink, "follow" it by calling
2131 # exists on the destination path.
2132 # exists on the destination path.
2132 if (self._cache[path]['exists'] and
2133 if (self._cache[path]['exists'] and
2133 'l' in self._cache[path]['flags']):
2134 'l' in self._cache[path]['flags']):
2134 return self.exists(self._cache[path]['data'].strip())
2135 return self.exists(self._cache[path]['data'].strip())
2135 else:
2136 else:
2136 return self._cache[path]['exists']
2137 return self._cache[path]['exists']
2137
2138
2138 return self._existsinparent(path)
2139 return self._existsinparent(path)
2139
2140
2140 def lexists(self, path):
2141 def lexists(self, path):
2141 """lexists returns True if the path exists"""
2142 """lexists returns True if the path exists"""
2142 if self.isdirty(path):
2143 if self.isdirty(path):
2143 return self._cache[path]['exists']
2144 return self._cache[path]['exists']
2144
2145
2145 return self._existsinparent(path)
2146 return self._existsinparent(path)
2146
2147
2147 def size(self, path):
2148 def size(self, path):
2148 if self.isdirty(path):
2149 if self.isdirty(path):
2149 if self._cache[path]['exists']:
2150 if self._cache[path]['exists']:
2150 return len(self._cache[path]['data'])
2151 return len(self._cache[path]['data'])
2151 else:
2152 else:
2152 raise error.ProgrammingError("No such file or directory: %s" %
2153 raise error.ProgrammingError("No such file or directory: %s" %
2153 path)
2154 path)
2154 return self._wrappedctx[path].size()
2155 return self._wrappedctx[path].size()
2155
2156
2156 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
2157 def tomemctx(self, text, branch=None, extra=None, date=None, parents=None,
2157 user=None, editor=None):
2158 user=None, editor=None):
2158 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2159 """Converts this ``overlayworkingctx`` into a ``memctx`` ready to be
2159 committed.
2160 committed.
2160
2161
2161 ``text`` is the commit message.
2162 ``text`` is the commit message.
2162 ``parents`` (optional) are rev numbers.
2163 ``parents`` (optional) are rev numbers.
2163 """
2164 """
2164 # Default parents to the wrapped contexts' if not passed.
2165 # Default parents to the wrapped contexts' if not passed.
2165 if parents is None:
2166 if parents is None:
2166 parents = self._wrappedctx.parents()
2167 parents = self._wrappedctx.parents()
2167 if len(parents) == 1:
2168 if len(parents) == 1:
2168 parents = (parents[0], None)
2169 parents = (parents[0], None)
2169
2170
2170 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2171 # ``parents`` is passed as rev numbers; convert to ``commitctxs``.
2171 if parents[1] is None:
2172 if parents[1] is None:
2172 parents = (self._repo[parents[0]], None)
2173 parents = (self._repo[parents[0]], None)
2173 else:
2174 else:
2174 parents = (self._repo[parents[0]], self._repo[parents[1]])
2175 parents = (self._repo[parents[0]], self._repo[parents[1]])
2175
2176
2176 files = self._cache.keys()
2177 files = self._cache.keys()
2177 def getfile(repo, memctx, path):
2178 def getfile(repo, memctx, path):
2178 if self._cache[path]['exists']:
2179 if self._cache[path]['exists']:
2179 return memfilectx(repo, memctx, path,
2180 return memfilectx(repo, memctx, path,
2180 self._cache[path]['data'],
2181 self._cache[path]['data'],
2181 'l' in self._cache[path]['flags'],
2182 'l' in self._cache[path]['flags'],
2182 'x' in self._cache[path]['flags'],
2183 'x' in self._cache[path]['flags'],
2183 self._cache[path]['copied'])
2184 self._cache[path]['copied'])
2184 else:
2185 else:
2185 # Returning None, but including the path in `files`, is
2186 # Returning None, but including the path in `files`, is
2186 # necessary for memctx to register a deletion.
2187 # necessary for memctx to register a deletion.
2187 return None
2188 return None
2188 return memctx(self._repo, parents, text, files, getfile, date=date,
2189 return memctx(self._repo, parents, text, files, getfile, date=date,
2189 extra=extra, user=user, branch=branch, editor=editor)
2190 extra=extra, user=user, branch=branch, editor=editor)
2190
2191
2191 def isdirty(self, path):
2192 def isdirty(self, path):
2192 return path in self._cache
2193 return path in self._cache
2193
2194
2194 def isempty(self):
2195 def isempty(self):
2195 # We need to discard any keys that are actually clean before the empty
2196 # We need to discard any keys that are actually clean before the empty
2196 # commit check.
2197 # commit check.
2197 self._compact()
2198 self._compact()
2198 return len(self._cache) == 0
2199 return len(self._cache) == 0
2199
2200
2200 def clean(self):
2201 def clean(self):
2201 self._cache = {}
2202 self._cache = {}
2202
2203
2203 def _compact(self):
2204 def _compact(self):
2204 """Removes keys from the cache that are actually clean, by comparing
2205 """Removes keys from the cache that are actually clean, by comparing
2205 them with the underlying context.
2206 them with the underlying context.
2206
2207
2207 This can occur during the merge process, e.g. by passing --tool :local
2208 This can occur during the merge process, e.g. by passing --tool :local
2208 to resolve a conflict.
2209 to resolve a conflict.
2209 """
2210 """
2210 keys = []
2211 keys = []
2211 for path in self._cache.keys():
2212 for path in self._cache.keys():
2212 cache = self._cache[path]
2213 cache = self._cache[path]
2213 try:
2214 try:
2214 underlying = self._wrappedctx[path]
2215 underlying = self._wrappedctx[path]
2215 if (underlying.data() == cache['data'] and
2216 if (underlying.data() == cache['data'] and
2216 underlying.flags() == cache['flags']):
2217 underlying.flags() == cache['flags']):
2217 keys.append(path)
2218 keys.append(path)
2218 except error.ManifestLookupError:
2219 except error.ManifestLookupError:
2219 # Path not in the underlying manifest (created).
2220 # Path not in the underlying manifest (created).
2220 continue
2221 continue
2221
2222
2222 for path in keys:
2223 for path in keys:
2223 del self._cache[path]
2224 del self._cache[path]
2224 return keys
2225 return keys
2225
2226
2226 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2227 def _markdirty(self, path, exists, data=None, date=None, flags=''):
2227 self._cache[path] = {
2228 self._cache[path] = {
2228 'exists': exists,
2229 'exists': exists,
2229 'data': data,
2230 'data': data,
2230 'date': date,
2231 'date': date,
2231 'flags': flags,
2232 'flags': flags,
2232 'copied': None,
2233 'copied': None,
2233 }
2234 }
2234
2235
2235 def filectx(self, path, filelog=None):
2236 def filectx(self, path, filelog=None):
2236 return overlayworkingfilectx(self._repo, path, parent=self,
2237 return overlayworkingfilectx(self._repo, path, parent=self,
2237 filelog=filelog)
2238 filelog=filelog)
2238
2239
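# --- Editorial sketch (not part of the original module) ----------------------
# How the pieces above fit together for an in-memory commit: wrap a base
# changeset, buffer edits in the cache, then convert to a memctx and commit.
# ``repo`` is assumed to be an existing localrepository; the file name and
# contents are made up.
def _example_overlay_commit(repo):
    wctx = overlayworkingctx(repo)
    wctx.setbase(repo['.'])
    # Buffered in wctx._cache only; nothing touches the working directory.
    wctx.write('a.txt', 'rewritten contents\n')
    mctx = wctx.tomemctx('rewrite a.txt in memory')
    return repo.commitctx(mctx)
# ------------------------------------------------------------------------------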
2239 class overlayworkingfilectx(committablefilectx):
2240 class overlayworkingfilectx(committablefilectx):
2240 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2241 """Wrap a ``workingfilectx`` but intercepts all writes into an in-memory
2241 cache, which can be flushed through later by calling ``flush()``."""
2242 cache, which can be flushed through later by calling ``flush()``."""
2242
2243
2243 def __init__(self, repo, path, filelog=None, parent=None):
2244 def __init__(self, repo, path, filelog=None, parent=None):
2244 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2245 super(overlayworkingfilectx, self).__init__(repo, path, filelog,
2245 parent)
2246 parent)
2246 self._repo = repo
2247 self._repo = repo
2247 self._parent = parent
2248 self._parent = parent
2248 self._path = path
2249 self._path = path
2249
2250
2250 def cmp(self, fctx):
2251 def cmp(self, fctx):
2251 return self.data() != fctx.data()
2252 return self.data() != fctx.data()
2252
2253
2253 def changectx(self):
2254 def changectx(self):
2254 return self._parent
2255 return self._parent
2255
2256
2256 def data(self):
2257 def data(self):
2257 return self._parent.data(self._path)
2258 return self._parent.data(self._path)
2258
2259
2259 def date(self):
2260 def date(self):
2260 return self._parent.filedate(self._path)
2261 return self._parent.filedate(self._path)
2261
2262
2262 def exists(self):
2263 def exists(self):
2263 return self.lexists()
2264 return self.lexists()
2264
2265
2265 def lexists(self):
2266 def lexists(self):
2266 return self._parent.exists(self._path)
2267 return self._parent.exists(self._path)
2267
2268
2268 def renamed(self):
2269 def renamed(self):
2269 path = self._parent.copydata(self._path)
2270 path = self._parent.copydata(self._path)
2270 if not path:
2271 if not path:
2271 return None
2272 return None
2272 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2273 return path, self._changectx._parents[0]._manifest.get(path, nullid)
2273
2274
2274 def size(self):
2275 def size(self):
2275 return self._parent.size(self._path)
2276 return self._parent.size(self._path)
2276
2277
2277 def markcopied(self, origin):
2278 def markcopied(self, origin):
2278 self._parent.markcopied(self._path, origin)
2279 self._parent.markcopied(self._path, origin)
2279
2280
2280 def audit(self):
2281 def audit(self):
2281 pass
2282 pass
2282
2283
2283 def flags(self):
2284 def flags(self):
2284 return self._parent.flags(self._path)
2285 return self._parent.flags(self._path)
2285
2286
2286 def setflags(self, islink, isexec):
2287 def setflags(self, islink, isexec):
2287 return self._parent.setflags(self._path, islink, isexec)
2288 return self._parent.setflags(self._path, islink, isexec)
2288
2289
2289 def write(self, data, flags, backgroundclose=False, **kwargs):
2290 def write(self, data, flags, backgroundclose=False, **kwargs):
2290 return self._parent.write(self._path, data, flags, **kwargs)
2291 return self._parent.write(self._path, data, flags, **kwargs)
2291
2292
2292 def remove(self, ignoremissing=False):
2293 def remove(self, ignoremissing=False):
2293 return self._parent.remove(self._path)
2294 return self._parent.remove(self._path)
2294
2295
2295 def clearunknown(self):
2296 def clearunknown(self):
2296 pass
2297 pass
2297
2298
2298 class workingcommitctx(workingctx):
2299 class workingcommitctx(workingctx):
2299 """A workingcommitctx object makes access to data related to
2300 """A workingcommitctx object makes access to data related to
2300 the revision being committed convenient.
2301 the revision being committed convenient.
2301
2302
2302 This hides changes in the working directory, if they aren't
2303 This hides changes in the working directory, if they aren't
2303 committed in this context.
2304 committed in this context.
2304 """
2305 """
2305 def __init__(self, repo, changes,
2306 def __init__(self, repo, changes,
2306 text="", user=None, date=None, extra=None):
2307 text="", user=None, date=None, extra=None):
2307 super(workingctx, self).__init__(repo, text, user, date, extra,
2308 super(workingctx, self).__init__(repo, text, user, date, extra,
2308 changes)
2309 changes)
2309
2310
2310 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2311 def _dirstatestatus(self, match, ignored=False, clean=False, unknown=False):
2311 """Return matched files only in ``self._status``
2312 """Return matched files only in ``self._status``
2312
2313
2313 Uncommitted files appear "clean" via this context, even if
2314 Uncommitted files appear "clean" via this context, even if
2314 they aren't actually so in the working directory.
2315 they aren't actually so in the working directory.
2315 """
2316 """
2316 if clean:
2317 if clean:
2317 clean = [f for f in self._manifest if f not in self._changedset]
2318 clean = [f for f in self._manifest if f not in self._changedset]
2318 else:
2319 else:
2319 clean = []
2320 clean = []
2320 return scmutil.status([f for f in self._status.modified if match(f)],
2321 return scmutil.status([f for f in self._status.modified if match(f)],
2321 [f for f in self._status.added if match(f)],
2322 [f for f in self._status.added if match(f)],
2322 [f for f in self._status.removed if match(f)],
2323 [f for f in self._status.removed if match(f)],
2323 [], [], [], clean)
2324 [], [], [], clean)
2324
2325
2325 @propertycache
2326 @propertycache
2326 def _changedset(self):
2327 def _changedset(self):
2327 """Return the set of files changed in this context
2328 """Return the set of files changed in this context
2328 """
2329 """
2329 changed = set(self._status.modified)
2330 changed = set(self._status.modified)
2330 changed.update(self._status.added)
2331 changed.update(self._status.added)
2331 changed.update(self._status.removed)
2332 changed.update(self._status.removed)
2332 return changed
2333 return changed
2333
2334
2334 def makecachingfilectxfn(func):
2335 def makecachingfilectxfn(func):
2335 """Create a filectxfn that caches based on the path.
2336 """Create a filectxfn that caches based on the path.
2336
2337
2337 We can't use util.cachefunc because it uses all arguments as the cache
2338 We can't use util.cachefunc because it uses all arguments as the cache
2338 key and this creates a cycle since the arguments include the repo and
2339 key and this creates a cycle since the arguments include the repo and
2339 memctx.
2340 memctx.
2340 """
2341 """
2341 cache = {}
2342 cache = {}
2342
2343
2343 def getfilectx(repo, memctx, path):
2344 def getfilectx(repo, memctx, path):
2344 if path not in cache:
2345 if path not in cache:
2345 cache[path] = func(repo, memctx, path)
2346 cache[path] = func(repo, memctx, path)
2346 return cache[path]
2347 return cache[path]
2347
2348
2348 return getfilectx
2349 return getfilectx
2349
2350
2350 def memfilefromctx(ctx):
2351 def memfilefromctx(ctx):
2351 """Given a context return a memfilectx for ctx[path]
2352 """Given a context return a memfilectx for ctx[path]
2352
2353
2353 This is a convenience method for building a memctx based on another
2354 This is a convenience method for building a memctx based on another
2354 context.
2355 context.
2355 """
2356 """
2356 def getfilectx(repo, memctx, path):
2357 def getfilectx(repo, memctx, path):
2357 fctx = ctx[path]
2358 fctx = ctx[path]
2358 # this is weird but apparently we only keep track of one parent
2359 # this is weird but apparently we only keep track of one parent
2359 # (why not only store that instead of a tuple?)
2360 # (why not only store that instead of a tuple?)
2360 copied = fctx.renamed()
2361 copied = fctx.renamed()
2361 if copied:
2362 if copied:
2362 copied = copied[0]
2363 copied = copied[0]
2363 return memfilectx(repo, memctx, path, fctx.data(),
2364 return memfilectx(repo, memctx, path, fctx.data(),
2364 islink=fctx.islink(), isexec=fctx.isexec(),
2365 islink=fctx.islink(), isexec=fctx.isexec(),
2365 copied=copied)
2366 copied=copied)
2366
2367
2367 return getfilectx
2368 return getfilectx
2368
2369
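# --- Editorial sketch (not part of the original module) ----------------------
# One plausible use of memfilefromctx(): replaying an existing changeset
# ``ctx`` onto another parent ``p1node`` by feeding its file data back through
# the callback.  (Passing ``ctx`` itself as the filectxfn would also work,
# since memctx wraps non-callable stores with memfilefromctx.)
def _example_replay(repo, ctx, p1node):
    mctx = memctx(repo, (p1node, None), ctx.description(), ctx.files(),
                  memfilefromctx(ctx), user=ctx.user(), date=ctx.date(),
                  extra=ctx.extra())
    return mctx.commit()
# ------------------------------------------------------------------------------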
2369 def memfilefrompatch(patchstore):
2370 def memfilefrompatch(patchstore):
2370 """Given a patch (e.g. patchstore object) return a memfilectx
2371 """Given a patch (e.g. patchstore object) return a memfilectx
2371
2372
2372 This is a convenience method for building a memctx based on a patchstore.
2373 This is a convenience method for building a memctx based on a patchstore.
2373 """
2374 """
2374 def getfilectx(repo, memctx, path):
2375 def getfilectx(repo, memctx, path):
2375 data, mode, copied = patchstore.getfile(path)
2376 data, mode, copied = patchstore.getfile(path)
2376 if data is None:
2377 if data is None:
2377 return None
2378 return None
2378 islink, isexec = mode
2379 islink, isexec = mode
2379 return memfilectx(repo, memctx, path, data, islink=islink,
2380 return memfilectx(repo, memctx, path, data, islink=islink,
2380 isexec=isexec, copied=copied)
2381 isexec=isexec, copied=copied)
2381
2382
2382 return getfilectx
2383 return getfilectx
2383
2384
2384 class memctx(committablectx):
2385 class memctx(committablectx):
2385 """Use memctx to perform in-memory commits via localrepo.commitctx().
2386 """Use memctx to perform in-memory commits via localrepo.commitctx().
2386
2387
2387 Revision information is supplied at initialization time, while the
2388 Revision information is supplied at initialization time, while the
2388 related files' data is made available through a callback
2389 related files' data is made available through a callback
2389 mechanism. 'repo' is the current localrepo, 'parents' is a
2390 mechanism. 'repo' is the current localrepo, 'parents' is a
2390 sequence of two parent revision identifiers (pass None for every
2391 sequence of two parent revision identifiers (pass None for every
2391 missing parent), 'text' is the commit message and 'files' lists
2392 missing parent), 'text' is the commit message and 'files' lists
2392 names of files touched by the revision (normalized and relative to
2393 names of files touched by the revision (normalized and relative to
2393 repository root).
2394 repository root).
2394
2395
2395 filectxfn(repo, memctx, path) is a callable receiving the
2396 filectxfn(repo, memctx, path) is a callable receiving the
2396 repository, the current memctx object and the normalized path of
2397 repository, the current memctx object and the normalized path of
2397 requested file, relative to repository root. It is fired by the
2398 requested file, relative to repository root. It is fired by the
2398 commit function for every file in 'files', but calls order is
2399 commit function for every file in 'files', but calls order is
2399 undefined. If the file is available in the revision being
2400 undefined. If the file is available in the revision being
2400 committed (updated or added), filectxfn returns a memfilectx
2401 committed (updated or added), filectxfn returns a memfilectx
2401 object. If the file was removed, filectxfn return None for recent
2402 object. If the file was removed, filectxfn return None for recent
2402 Mercurial. Moved files are represented by marking the source file
2403 Mercurial. Moved files are represented by marking the source file
2403 removed and the new file added with copy information (see
2404 removed and the new file added with copy information (see
2404 memfilectx).
2405 memfilectx).
2405
2406
2406 user receives the committer name and defaults to current
2407 user receives the committer name and defaults to current
2407 repository username, date is the commit date in any format
2408 repository username, date is the commit date in any format
2408 supported by dateutil.parsedate() and defaults to current date, extra
2409 supported by dateutil.parsedate() and defaults to current date, extra
2409 is a dictionary of metadata or is left empty.
2410 is a dictionary of metadata or is left empty.
2410 """
2411 """
2411
2412
2412 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2413 # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
2413 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2414 # Extensions that need to retain compatibility across Mercurial 3.1 can use
2414 # this field to determine what to do in filectxfn.
2415 # this field to determine what to do in filectxfn.
2415 _returnnoneformissingfiles = True
2416 _returnnoneformissingfiles = True
2416
2417
2417 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2418 def __init__(self, repo, parents, text, files, filectxfn, user=None,
2418 date=None, extra=None, branch=None, editor=False):
2419 date=None, extra=None, branch=None, editor=False):
2419 super(memctx, self).__init__(repo, text, user, date, extra)
2420 super(memctx, self).__init__(repo, text, user, date, extra)
2420 self._rev = None
2421 self._rev = None
2421 self._node = None
2422 self._node = None
2422 parents = [(p or nullid) for p in parents]
2423 parents = [(p or nullid) for p in parents]
2423 p1, p2 = parents
2424 p1, p2 = parents
2424 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2425 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
2425 files = sorted(set(files))
2426 files = sorted(set(files))
2426 self._files = files
2427 self._files = files
2427 if branch is not None:
2428 if branch is not None:
2428 self._extra['branch'] = encoding.fromlocal(branch)
2429 self._extra['branch'] = encoding.fromlocal(branch)
2429 self.substate = {}
2430 self.substate = {}
2430
2431
2431 if isinstance(filectxfn, patch.filestore):
2432 if isinstance(filectxfn, patch.filestore):
2432 filectxfn = memfilefrompatch(filectxfn)
2433 filectxfn = memfilefrompatch(filectxfn)
2433 elif not callable(filectxfn):
2434 elif not callable(filectxfn):
2434 # if store is not callable, wrap it in a function
2435 # if store is not callable, wrap it in a function
2435 filectxfn = memfilefromctx(filectxfn)
2436 filectxfn = memfilefromctx(filectxfn)
2436
2437
2437 # memoizing increases performance for e.g. vcs convert scenarios.
2438 # memoizing increases performance for e.g. vcs convert scenarios.
2438 self._filectxfn = makecachingfilectxfn(filectxfn)
2439 self._filectxfn = makecachingfilectxfn(filectxfn)
2439
2440
2440 if editor:
2441 if editor:
2441 self._text = editor(self._repo, self, [])
2442 self._text = editor(self._repo, self, [])
2442 self._repo.savecommitmessage(self._text)
2443 self._repo.savecommitmessage(self._text)
2443
2444
2444 def filectx(self, path, filelog=None):
2445 def filectx(self, path, filelog=None):
2445 """get a file context from the working directory
2446 """get a file context from the working directory
2446
2447
2447 Returns None if file doesn't exist and should be removed."""
2448 Returns None if file doesn't exist and should be removed."""
2448 return self._filectxfn(self._repo, self, path)
2449 return self._filectxfn(self._repo, self, path)
2449
2450
2450 def commit(self):
2451 def commit(self):
2451 """commit context to the repo"""
2452 """commit context to the repo"""
2452 return self._repo.commitctx(self)
2453 return self._repo.commitctx(self)
2453
2454
2454 @propertycache
2455 @propertycache
2455 def _manifest(self):
2456 def _manifest(self):
2456 """generate a manifest based on the return values of filectxfn"""
2457 """generate a manifest based on the return values of filectxfn"""
2457
2458
2458 # keep this simple for now; just worry about p1
2459 # keep this simple for now; just worry about p1
2459 pctx = self._parents[0]
2460 pctx = self._parents[0]
2460 man = pctx.manifest().copy()
2461 man = pctx.manifest().copy()
2461
2462
2462 for f in self._status.modified:
2463 for f in self._status.modified:
2463 p1node = nullid
2464 p1node = nullid
2464 p2node = nullid
2465 p2node = nullid
2465 p = pctx[f].parents() # if file isn't in pctx, check p2?
2466 p = pctx[f].parents() # if file isn't in pctx, check p2?
2466 if len(p) > 0:
2467 if len(p) > 0:
2467 p1node = p[0].filenode()
2468 p1node = p[0].filenode()
2468 if len(p) > 1:
2469 if len(p) > 1:
2469 p2node = p[1].filenode()
2470 p2node = p[1].filenode()
2470 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2471 man[f] = revlog.hash(self[f].data(), p1node, p2node)
2471
2472
2472 for f in self._status.added:
2473 for f in self._status.added:
2473 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2474 man[f] = revlog.hash(self[f].data(), nullid, nullid)
2474
2475
2475 for f in self._status.removed:
2476 for f in self._status.removed:
2476 if f in man:
2477 if f in man:
2477 del man[f]
2478 del man[f]
2478
2479
2479 return man
2480 return man
2480
2481
2481 @propertycache
2482 @propertycache
2482 def _status(self):
2483 def _status(self):
2483 """Calculate exact status from ``files`` specified at construction
2484 """Calculate exact status from ``files`` specified at construction
2484 """
2485 """
2485 man1 = self.p1().manifest()
2486 man1 = self.p1().manifest()
2486 p2 = self._parents[1]
2487 p2 = self._parents[1]
2487 # "1 < len(self._parents)" can't be used for checking
2488 # "1 < len(self._parents)" can't be used for checking
2488 # existence of the 2nd parent, because "memctx._parents" is
2489 # existence of the 2nd parent, because "memctx._parents" is
2489 # explicitly initialized by the list, of which length is 2.
2490 # explicitly initialized by the list, of which length is 2.
2490 if p2.node() != nullid:
2491 if p2.node() != nullid:
2491 man2 = p2.manifest()
2492 man2 = p2.manifest()
2492 managing = lambda f: f in man1 or f in man2
2493 managing = lambda f: f in man1 or f in man2
2493 else:
2494 else:
2494 managing = lambda f: f in man1
2495 managing = lambda f: f in man1
2495
2496
2496 modified, added, removed = [], [], []
2497 modified, added, removed = [], [], []
2497 for f in self._files:
2498 for f in self._files:
2498 if not managing(f):
2499 if not managing(f):
2499 added.append(f)
2500 added.append(f)
2500 elif self[f]:
2501 elif self[f]:
2501 modified.append(f)
2502 modified.append(f)
2502 else:
2503 else:
2503 removed.append(f)
2504 removed.append(f)
2504
2505
2505 return scmutil.status(modified, added, removed, [], [], [], [])
2506 return scmutil.status(modified, added, removed, [], [], [], [])
2506
2507
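# --- Editorial sketch (not part of the original module) ----------------------
# Building a memctx from scratch, following the filectxfn protocol described
# in the class docstring above.  ``repo``, ``p1`` and ``p2`` (node ids, p2 may
# be None) are assumed to exist; the file name and contents are made up.
def _example_inmemory_commit(repo, p1, p2=None):
    def getfilectx(repo, memctx, path):
        # Return a memfilectx for files present in the commit; returning
        # None here would record ``path`` as removed instead.
        return memfilectx(repo, memctx, path, 'hello\n',
                          islink=False, isexec=False, copied=None)
    mctx = memctx(repo, (p1, p2), 'example in-memory commit', ['a.txt'],
                  getfilectx, user='user@example.com')
    return repo.commitctx(mctx)
# ------------------------------------------------------------------------------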
2507 class memfilectx(committablefilectx):
2508 class memfilectx(committablefilectx):
2508 """memfilectx represents an in-memory file to commit.
2509 """memfilectx represents an in-memory file to commit.
2509
2510
2510 See memctx and committablefilectx for more details.
2511 See memctx and committablefilectx for more details.
2511 """
2512 """
2512 def __init__(self, repo, changectx, path, data, islink=False,
2513 def __init__(self, repo, changectx, path, data, islink=False,
2513 isexec=False, copied=None):
2514 isexec=False, copied=None):
2514 """
2515 """
2515 path is the normalized file path relative to repository root.
2516 path is the normalized file path relative to repository root.
2516 data is the file content as a string.
2517 data is the file content as a string.
2517 islink is True if the file is a symbolic link.
2518 islink is True if the file is a symbolic link.
2518 isexec is True if the file is executable.
2519 isexec is True if the file is executable.
2519 copied is the source file path if current file was copied in the
2520 copied is the source file path if current file was copied in the
2520 revision being committed, or None."""
2521 revision being committed, or None."""
2521 super(memfilectx, self).__init__(repo, path, None, changectx)
2522 super(memfilectx, self).__init__(repo, path, None, changectx)
2522 self._data = data
2523 self._data = data
2523 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2524 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
2524 self._copied = None
2525 self._copied = None
2525 if copied:
2526 if copied:
2526 self._copied = (copied, nullid)
2527 self._copied = (copied, nullid)
2527
2528
2528 def data(self):
2529 def data(self):
2529 return self._data
2530 return self._data
2530
2531
2531 def remove(self, ignoremissing=False):
2532 def remove(self, ignoremissing=False):
2532 """wraps unlink for a repo's working directory"""
2533 """wraps unlink for a repo's working directory"""
2533 # need to figure out what to do here
2534 # need to figure out what to do here
2534 del self._changectx[self._path]
2535 del self._changectx[self._path]
2535
2536
2536 def write(self, data, flags, **kwargs):
2537 def write(self, data, flags, **kwargs):
2537 """wraps repo.wwrite"""
2538 """wraps repo.wwrite"""
2538 self._data = data
2539 self._data = data
2539
2540
2540 class overlayfilectx(committablefilectx):
2541 class overlayfilectx(committablefilectx):
2541 """Like memfilectx but take an original filectx and optional parameters to
2542 """Like memfilectx but take an original filectx and optional parameters to
2542 override parts of it. This is useful when fctx.data() is expensive (i.e.
2543 override parts of it. This is useful when fctx.data() is expensive (i.e.
2543 flag processor is expensive) and raw data, flags, and filenode could be
2544 flag processor is expensive) and raw data, flags, and filenode could be
2544 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2545 reused (ex. rebase or mode-only amend a REVIDX_EXTSTORED file).
2545 """
2546 """
2546
2547
2547 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2548 def __init__(self, originalfctx, datafunc=None, path=None, flags=None,
2548 copied=None, ctx=None):
2549 copied=None, ctx=None):
2549 """originalfctx: filecontext to duplicate
2550 """originalfctx: filecontext to duplicate
2550
2551
2551 datafunc: None, or a function (so the data can be computed lazily) that
2552 datafunc: None, or a function (so the data can be computed lazily) that
2552 overrides data (file content). path, flags, copied, ctx: None or an overridden value
2553 overrides data (file content). path, flags, copied, ctx: None or an overridden value
2553
2554
2554 copied could be (path, rev), or False. copied could also be just path,
2555 copied could be (path, rev), or False. copied could also be just path,
2555 and will be converted to (path, nullid). This simplifies some callers.
2556 and will be converted to (path, nullid). This simplifies some callers.
2556 """
2557 """
2557
2558
2558 if path is None:
2559 if path is None:
2559 path = originalfctx.path()
2560 path = originalfctx.path()
2560 if ctx is None:
2561 if ctx is None:
2561 ctx = originalfctx.changectx()
2562 ctx = originalfctx.changectx()
2562 ctxmatch = lambda: True
2563 ctxmatch = lambda: True
2563 else:
2564 else:
2564 ctxmatch = lambda: ctx == originalfctx.changectx()
2565 ctxmatch = lambda: ctx == originalfctx.changectx()
2565
2566
2566 repo = originalfctx.repo()
2567 repo = originalfctx.repo()
2567 flog = originalfctx.filelog()
2568 flog = originalfctx.filelog()
2568 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2569 super(overlayfilectx, self).__init__(repo, path, flog, ctx)
2569
2570
2570 if copied is None:
2571 if copied is None:
2571 copied = originalfctx.renamed()
2572 copied = originalfctx.renamed()
2572 copiedmatch = lambda: True
2573 copiedmatch = lambda: True
2573 else:
2574 else:
2574 if copied and not isinstance(copied, tuple):
2575 if copied and not isinstance(copied, tuple):
2575 # repo._filecommit will recalculate copyrev so nullid is okay
2576 # repo._filecommit will recalculate copyrev so nullid is okay
2576 copied = (copied, nullid)
2577 copied = (copied, nullid)
2577 copiedmatch = lambda: copied == originalfctx.renamed()
2578 copiedmatch = lambda: copied == originalfctx.renamed()
2578
2579
2579 # When data, copied (could affect data), ctx (could affect filelog
2580 # When data, copied (could affect data), ctx (could affect filelog
2580 # parents) are not overridden, rawdata, rawflags, and filenode may be
2581 # parents) are not overridden, rawdata, rawflags, and filenode may be
2581 # reused (repo._filecommit should double check filelog parents).
2582 # reused (repo._filecommit should double check filelog parents).
2582 #
2583 #
2583 # path, flags are not hashed in filelog (but in manifestlog) so they do
2584 # path, flags are not hashed in filelog (but in manifestlog) so they do
2584 # not affect reusability here.
2585 # not affect reusability here.
2585 #
2586 #
2586 # If ctx or copied is overridden to the same value as in originalfctx,
2587 # If ctx or copied is overridden to the same value as in originalfctx,
2587 # it is still considered reusable. originalfctx.renamed() may be a bit
2588 # it is still considered reusable. originalfctx.renamed() may be a bit
2588 # expensive so it's not called unless necessary. Assuming datafunc is
2589 # expensive so it's not called unless necessary. Assuming datafunc is
2589 # always expensive, do not call it for this "reusable" test.
2590 # always expensive, do not call it for this "reusable" test.
2590 reusable = datafunc is None and ctxmatch() and copiedmatch()
2591 reusable = datafunc is None and ctxmatch() and copiedmatch()
2591
2592
2592 if datafunc is None:
2593 if datafunc is None:
2593 datafunc = originalfctx.data
2594 datafunc = originalfctx.data
2594 if flags is None:
2595 if flags is None:
2595 flags = originalfctx.flags()
2596 flags = originalfctx.flags()
2596
2597
2597 self._datafunc = datafunc
2598 self._datafunc = datafunc
2598 self._flags = flags
2599 self._flags = flags
2599 self._copied = copied
2600 self._copied = copied
2600
2601
2601 if reusable:
2602 if reusable:
2602 # copy extra fields from originalfctx
2603 # copy extra fields from originalfctx
2603 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2604 attrs = ['rawdata', 'rawflags', '_filenode', '_filerev']
2604 for attr_ in attrs:
2605 for attr_ in attrs:
2605 if util.safehasattr(originalfctx, attr_):
2606 if util.safehasattr(originalfctx, attr_):
2606 setattr(self, attr_, getattr(originalfctx, attr_))
2607 setattr(self, attr_, getattr(originalfctx, attr_))
2607
2608
2608 def data(self):
2609 def data(self):
2609 return self._datafunc()
2610 return self._datafunc()
2610
2611
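# --- Editorial sketch (not part of the original module) ----------------------
# Using overlayfilectx to reuse an existing filectx while overriding only its
# flags, as the docstring above describes (e.g. a mode-only change where the
# expensive data()/rawdata can be reused).  ``fctx`` is assumed to be a filectx
# taken from some existing changeset.
def _example_make_executable(fctx):
    return overlayfilectx(fctx, flags='x')
# ------------------------------------------------------------------------------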
2611 class metadataonlyctx(committablectx):
2612 class metadataonlyctx(committablectx):
2612 """Like memctx but it's reusing the manifest of different commit.
2613 """Like memctx but it's reusing the manifest of different commit.
2613 Intended to be used by lightweight operations that are creating
2614 Intended to be used by lightweight operations that are creating
2614 metadata-only changes.
2615 metadata-only changes.
2615
2616
2616 Revision information is supplied at initialization time. 'repo' is the
2617 Revision information is supplied at initialization time. 'repo' is the
2617 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2618 current localrepo, 'ctx' is the original revision whose manifest we're reusing,
2618 'parents' is a sequence of two parent revision identifiers (pass None for
2619 'parents' is a sequence of two parent revision identifiers (pass None for
2619 every missing parent), and 'text' is the commit message.
2620 every missing parent), and 'text' is the commit message.
2620
2621
2621 user receives the committer name and defaults to current repository
2622 user receives the committer name and defaults to current repository
2622 username, date is the commit date in any format supported by
2623 username, date is the commit date in any format supported by
2623 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2624 dateutil.parsedate() and defaults to current date, extra is a dictionary of
2624 metadata or is left empty.
2625 metadata or is left empty.
2625 """
2626 """
2626 def __new__(cls, repo, originalctx, *args, **kwargs):
2627 def __new__(cls, repo, originalctx, *args, **kwargs):
2627 return super(metadataonlyctx, cls).__new__(cls, repo)
2628 return super(metadataonlyctx, cls).__new__(cls, repo)
2628
2629
2629 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2630 def __init__(self, repo, originalctx, parents=None, text=None, user=None,
2630 date=None, extra=None, editor=False):
2631 date=None, extra=None, editor=False):
2631 if text is None:
2632 if text is None:
2632 text = originalctx.description()
2633 text = originalctx.description()
2633 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2634 super(metadataonlyctx, self).__init__(repo, text, user, date, extra)
2634 self._rev = None
2635 self._rev = None
2635 self._node = None
2636 self._node = None
2636 self._originalctx = originalctx
2637 self._originalctx = originalctx
2637 self._manifestnode = originalctx.manifestnode()
2638 self._manifestnode = originalctx.manifestnode()
2638 if parents is None:
2639 if parents is None:
2639 parents = originalctx.parents()
2640 parents = originalctx.parents()
2640 else:
2641 else:
2641 parents = [repo[p] for p in parents if p is not None]
2642 parents = [repo[p] for p in parents if p is not None]
2642 parents = parents[:]
2643 parents = parents[:]
2643 while len(parents) < 2:
2644 while len(parents) < 2:
2644 parents.append(repo[nullid])
2645 parents.append(repo[nullid])
2645 p1, p2 = self._parents = parents
2646 p1, p2 = self._parents = parents
2646
2647
2647 # sanity check to ensure that the reused manifest parents are
2648 # sanity check to ensure that the reused manifest parents are
2648 # manifests of our commit parents
2649 # manifests of our commit parents
2649 mp1, mp2 = self.manifestctx().parents
2650 mp1, mp2 = self.manifestctx().parents
2650 if p1 != nullid and p1.manifestnode() != mp1:
2651 if p1 != nullid and p1.manifestnode() != mp1:
2651 raise RuntimeError('can\'t reuse the manifest: '
2652 raise RuntimeError('can\'t reuse the manifest: '
2652 'its p1 doesn\'t match the new ctx p1')
2653 'its p1 doesn\'t match the new ctx p1')
2653 if p2 != nullid and p2.manifestnode() != mp2:
2654 if p2 != nullid and p2.manifestnode() != mp2:
2654 raise RuntimeError('can\'t reuse the manifest: '
2655 raise RuntimeError('can\'t reuse the manifest: '
2655 'its p2 doesn\'t match the new ctx p2')
2656 'its p2 doesn\'t match the new ctx p2')
2656
2657
2657 self._files = originalctx.files()
2658 self._files = originalctx.files()
2658 self.substate = {}
2659 self.substate = {}
2659
2660
2660 if editor:
2661 if editor:
2661 self._text = editor(self._repo, self, [])
2662 self._text = editor(self._repo, self, [])
2662 self._repo.savecommitmessage(self._text)
2663 self._repo.savecommitmessage(self._text)
2663
2664
2664 def manifestnode(self):
2665 def manifestnode(self):
2665 return self._manifestnode
2666 return self._manifestnode
2666
2667
2667 @property
2668 @property
2668 def _manifestctx(self):
2669 def _manifestctx(self):
2669 return self._repo.manifestlog[self._manifestnode]
2670 return self._repo.manifestlog[self._manifestnode]
2670
2671
2671 def filectx(self, path, filelog=None):
2672 def filectx(self, path, filelog=None):
2672 return self._originalctx.filectx(path, filelog=filelog)
2673 return self._originalctx.filectx(path, filelog=filelog)
2673
2674
2674 def commit(self):
2675 def commit(self):
2675 """commit context to the repo"""
2676 """commit context to the repo"""
2676 return self._repo.commitctx(self)
2677 return self._repo.commitctx(self)
2677
2678
2678 @property
2679 @property
2679 def _manifest(self):
2680 def _manifest(self):
2680 return self._originalctx.manifest()
2681 return self._originalctx.manifest()
2681
2682
2682 @propertycache
2683 @propertycache
2683 def _status(self):
2684 def _status(self):
2684 """Calculate exact status from ``files`` specified in the ``origctx``
2685 """Calculate exact status from ``files`` specified in the ``origctx``
2685 and parents manifests.
2686 and parents manifests.
2686 """
2687 """
2687 man1 = self.p1().manifest()
2688 man1 = self.p1().manifest()
2688 p2 = self._parents[1]
2689 p2 = self._parents[1]
2689 # "1 < len(self._parents)" can't be used for checking
2690 # "1 < len(self._parents)" can't be used for checking
2690 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2691 # existence of the 2nd parent, because "metadataonlyctx._parents" is
2691 # explicitly initialized to a list whose length is always 2.
2692 # explicitly initialized to a list whose length is always 2.
2692 if p2.node() != nullid:
2693 if p2.node() != nullid:
2693 man2 = p2.manifest()
2694 man2 = p2.manifest()
2694 managing = lambda f: f in man1 or f in man2
2695 managing = lambda f: f in man1 or f in man2
2695 else:
2696 else:
2696 managing = lambda f: f in man1
2697 managing = lambda f: f in man1
2697
2698
2698 modified, added, removed = [], [], []
2699 modified, added, removed = [], [], []
2699 for f in self._files:
2700 for f in self._files:
2700 if not managing(f):
2701 if not managing(f):
2701 added.append(f)
2702 added.append(f)
2702 elif f in self:
2703 elif f in self:
2703 modified.append(f)
2704 modified.append(f)
2704 else:
2705 else:
2705 removed.append(f)
2706 removed.append(f)
2706
2707
2707 return scmutil.status(modified, added, removed, [], [], [], [])
2708 return scmutil.status(modified, added, removed, [], [], [], [])
2708
2709
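# A minimal usage sketch for metadataonlyctx, assuming an existing local
# repository object; the helper name and the new message are illustrative, and
# locking/error handling are omitted. It recommits the working directory
# parent with a new description while reusing that revision's manifest.
def _rewrite_description(repo, newtext):
    oldctx = repo['.']
    ctx = metadataonlyctx(repo, oldctx,
                          parents=(oldctx.p1().node(), oldctx.p2().node()),
                          text=newtext,
                          user=oldctx.user(),
                          date=oldctx.date(),
                          extra=oldctx.extra())
    # commit() hands the context to repo.commitctx() and returns the new node
    return ctx.commit()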
2709 class arbitraryfilectx(object):
2710 class arbitraryfilectx(object):
2710 """Allows you to use filectx-like functions on a file in an arbitrary
2711 """Allows you to use filectx-like functions on a file in an arbitrary
2711 location on disk, possibly not in the working directory.
2712 location on disk, possibly not in the working directory.
2712 """
2713 """
2713 def __init__(self, path, repo=None):
2714 def __init__(self, path, repo=None):
2714 # Repo is optional because contrib/simplemerge uses this class.
2715 # Repo is optional because contrib/simplemerge uses this class.
2715 self._repo = repo
2716 self._repo = repo
2716 self._path = path
2717 self._path = path
2717
2718
2718 def cmp(self, fctx):
2719 def cmp(self, fctx):
2719 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2720 # filecmp follows symlinks whereas `cmp` should not, so skip the fast
2720 # path if either side is a symlink.
2721 # path if either side is a symlink.
2721 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2722 symlinks = ('l' in self.flags() or 'l' in fctx.flags())
2722 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2723 if not symlinks and isinstance(fctx, workingfilectx) and self._repo:
2723 # Add a fast-path for merge if both sides are disk-backed.
2724 # Add a fast-path for merge if both sides are disk-backed.
2724 # Note that filecmp uses the opposite return values (True if same)
2725 # Note that filecmp uses the opposite return values (True if same)
2725 # from our cmp functions (True if different).
2726 # from our cmp functions (True if different).
2726 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2727 return not filecmp.cmp(self.path(), self._repo.wjoin(fctx.path()))
2727 return self.data() != fctx.data()
2728 return self.data() != fctx.data()
2728
2729
2729 def path(self):
2730 def path(self):
2730 return self._path
2731 return self._path
2731
2732
2732 def flags(self):
2733 def flags(self):
2733 return ''
2734 return ''
2734
2735
2735 def data(self):
2736 def data(self):
2736 return util.readfile(self._path)
2737 return util.readfile(self._path)
2737
2738
2738 def decodeddata(self):
2739 def decodeddata(self):
2739 with open(self._path, "rb") as f:
2740 with open(self._path, "rb") as f:
2740 return f.read()
2741 return f.read()
2741
2742
2742 def remove(self):
2743 def remove(self):
2743 util.unlink(self._path)
2744 util.unlink(self._path)
2744
2745
2745 def write(self, data, flags, **kwargs):
2746 def write(self, data, flags, **kwargs):
2746 assert not flags
2747 assert not flags
2747 with open(self._path, "w") as f:
2748 with open(self._path, "w") as f:
2748 f.write(data)
2749 f.write(data)
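# A small sketch showing how arbitraryfilectx can be compared against a
# working-directory file using only the methods defined above; 'repo',
# 'abspath' and 'wdirpath' are placeholders supplied by the caller.
def _differs_from_working_copy(repo, abspath, wdirpath):
    fctx = arbitraryfilectx(abspath, repo=repo)
    # cmp() returns True when the contents differ (note this is the opposite
    # convention from filecmp.cmp(), which returns True when they are equal)
    return fctx.cmp(repo[None][wdirpath])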
@@ -1,1788 +1,1808 b''
1 # subrepo.py - sub-repository classes and factory
1 # subrepo.py - sub-repository classes and factory
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22 from .i18n import _
22 from .i18n import _
23 from . import (
23 from . import (
24 cmdutil,
24 cmdutil,
25 encoding,
25 encoding,
26 error,
26 error,
27 exchange,
27 exchange,
28 logcmdutil,
28 logcmdutil,
29 match as matchmod,
29 match as matchmod,
30 node,
30 node,
31 pathutil,
31 pathutil,
32 phases,
32 phases,
33 pycompat,
33 pycompat,
34 scmutil,
34 scmutil,
35 subrepoutil,
35 subrepoutil,
36 util,
36 util,
37 vfs as vfsmod,
37 vfs as vfsmod,
38 )
38 )
39 from .utils import dateutil
39 from .utils import dateutil
40
40
41 hg = None
41 hg = None
42 reporelpath = subrepoutil.reporelpath
42 reporelpath = subrepoutil.reporelpath
43 subrelpath = subrepoutil.subrelpath
43 subrelpath = subrepoutil.subrelpath
44 _abssource = subrepoutil._abssource
44 _abssource = subrepoutil._abssource
45 propertycache = util.propertycache
45 propertycache = util.propertycache
46
46
47 def _expandedabspath(path):
47 def _expandedabspath(path):
48 '''
48 '''
49 get a path or URL; if it is a path, expand it and return it as an absolute path
49 get a path or URL; if it is a path, expand it and return it as an absolute path
50 '''
50 '''
51 expandedpath = util.urllocalpath(util.expandpath(path))
51 expandedpath = util.urllocalpath(util.expandpath(path))
52 u = util.url(expandedpath)
52 u = util.url(expandedpath)
53 if not u.scheme:
53 if not u.scheme:
54 path = util.normpath(os.path.abspath(u.path))
54 path = util.normpath(os.path.abspath(u.path))
55 return path
55 return path
56
56
57 def _getstorehashcachename(remotepath):
57 def _getstorehashcachename(remotepath):
58 '''get a unique filename for the store hash cache of a remote repository'''
58 '''get a unique filename for the store hash cache of a remote repository'''
59 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
59 return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
60
60
61 class SubrepoAbort(error.Abort):
61 class SubrepoAbort(error.Abort):
62 """Exception class used to avoid handling a subrepo error more than once"""
62 """Exception class used to avoid handling a subrepo error more than once"""
63 def __init__(self, *args, **kw):
63 def __init__(self, *args, **kw):
64 self.subrepo = kw.pop(r'subrepo', None)
64 self.subrepo = kw.pop(r'subrepo', None)
65 self.cause = kw.pop(r'cause', None)
65 self.cause = kw.pop(r'cause', None)
66 error.Abort.__init__(self, *args, **kw)
66 error.Abort.__init__(self, *args, **kw)
67
67
68 def annotatesubrepoerror(func):
68 def annotatesubrepoerror(func):
69 def decoratedmethod(self, *args, **kargs):
69 def decoratedmethod(self, *args, **kargs):
70 try:
70 try:
71 res = func(self, *args, **kargs)
71 res = func(self, *args, **kargs)
72 except SubrepoAbort as ex:
72 except SubrepoAbort as ex:
73 # This exception has already been handled
73 # This exception has already been handled
74 raise ex
74 raise ex
75 except error.Abort as ex:
75 except error.Abort as ex:
76 subrepo = subrelpath(self)
76 subrepo = subrelpath(self)
77 errormsg = (util.forcebytestr(ex) + ' '
77 errormsg = (util.forcebytestr(ex) + ' '
78 + _('(in subrepository "%s")') % subrepo)
78 + _('(in subrepository "%s")') % subrepo)
79 # avoid handling this exception by raising a SubrepoAbort exception
79 # avoid handling this exception by raising a SubrepoAbort exception
80 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
80 raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
81 cause=sys.exc_info())
81 cause=sys.exc_info())
82 return res
82 return res
83 return decoratedmethod
83 return decoratedmethod
84
84
85 def _updateprompt(ui, sub, dirty, local, remote):
85 def _updateprompt(ui, sub, dirty, local, remote):
86 if dirty:
86 if dirty:
87 msg = (_(' subrepository sources for %s differ\n'
87 msg = (_(' subrepository sources for %s differ\n'
88 'use (l)ocal source (%s) or (r)emote source (%s)?'
88 'use (l)ocal source (%s) or (r)emote source (%s)?'
89 '$$ &Local $$ &Remote')
89 '$$ &Local $$ &Remote')
90 % (subrelpath(sub), local, remote))
90 % (subrelpath(sub), local, remote))
91 else:
91 else:
92 msg = (_(' subrepository sources for %s differ (in checked out '
92 msg = (_(' subrepository sources for %s differ (in checked out '
93 'version)\n'
93 'version)\n'
94 'use (l)ocal source (%s) or (r)emote source (%s)?'
94 'use (l)ocal source (%s) or (r)emote source (%s)?'
95 '$$ &Local $$ &Remote')
95 '$$ &Local $$ &Remote')
96 % (subrelpath(sub), local, remote))
96 % (subrelpath(sub), local, remote))
97 return ui.promptchoice(msg, 0)
97 return ui.promptchoice(msg, 0)
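# ui.promptchoice() parses the trailing '$$ &Local $$ &Remote' markers as the
# available answers and returns the chosen 0-based index, so callers of
# _updateprompt() get 0 for "local" and 1 for "remote"; the second argument
# (0 here) is used when input is empty or the ui is non-interactive.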
98
98
99 def _sanitize(ui, vfs, ignore):
99 def _sanitize(ui, vfs, ignore):
100 for dirname, dirs, names in vfs.walk():
100 for dirname, dirs, names in vfs.walk():
101 for i, d in enumerate(dirs):
101 for i, d in enumerate(dirs):
102 if d.lower() == ignore:
102 if d.lower() == ignore:
103 del dirs[i]
103 del dirs[i]
104 break
104 break
105 if vfs.basename(dirname).lower() != '.hg':
105 if vfs.basename(dirname).lower() != '.hg':
106 continue
106 continue
107 for f in names:
107 for f in names:
108 if f.lower() == 'hgrc':
108 if f.lower() == 'hgrc':
109 ui.warn(_("warning: removing potentially hostile 'hgrc' "
109 ui.warn(_("warning: removing potentially hostile 'hgrc' "
110 "in '%s'\n") % vfs.join(dirname))
110 "in '%s'\n") % vfs.join(dirname))
111 vfs.unlink(vfs.reljoin(dirname, f))
111 vfs.unlink(vfs.reljoin(dirname, f))
112
112
113 def _auditsubrepopath(repo, path):
113 def _auditsubrepopath(repo, path):
114 # auditor doesn't check if the path itself is a symlink
114 # auditor doesn't check if the path itself is a symlink
115 pathutil.pathauditor(repo.root)(path)
115 pathutil.pathauditor(repo.root)(path)
116 if repo.wvfs.islink(path):
116 if repo.wvfs.islink(path):
117 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
117 raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
118
118
119 SUBREPO_ALLOWED_DEFAULTS = {
119 SUBREPO_ALLOWED_DEFAULTS = {
120 'hg': True,
120 'hg': True,
121 'git': False,
121 'git': False,
122 'svn': False,
122 'svn': False,
123 }
123 }
124
124
125 def _checktype(ui, kind):
125 def _checktype(ui, kind):
126 # subrepos.allowed is a master kill switch. If disabled, subrepos are
126 # subrepos.allowed is a master kill switch. If disabled, subrepos are
127 # disabled period.
127 # disabled period.
128 if not ui.configbool('subrepos', 'allowed', True):
128 if not ui.configbool('subrepos', 'allowed', True):
129 raise error.Abort(_('subrepos not enabled'),
129 raise error.Abort(_('subrepos not enabled'),
130 hint=_("see 'hg help config.subrepos' for details"))
130 hint=_("see 'hg help config.subrepos' for details"))
131
131
132 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
132 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
133 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
133 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
134 raise error.Abort(_('%s subrepos not allowed') % kind,
134 raise error.Abort(_('%s subrepos not allowed') % kind,
135 hint=_("see 'hg help config.subrepos' for details"))
135 hint=_("see 'hg help config.subrepos' for details"))
136
136
137 if kind not in types:
137 if kind not in types:
138 raise error.Abort(_('unknown subrepo type %s') % kind)
138 raise error.Abort(_('unknown subrepo type %s') % kind)
139
139
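# These checks are driven by the [subrepos] configuration section read above;
# an hgrc along the following (illustrative) lines keeps hg subrepos enabled
# and additionally allows git ones while leaving svn disabled:
#
#   [subrepos]
#   allowed = true
#   git:allowed = true
#   svn:allowed = false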
140 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
140 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
141 """return instance of the right subrepo class for subrepo in path"""
141 """return instance of the right subrepo class for subrepo in path"""
142 # subrepo inherently violates our import layering rules
142 # subrepo inherently violates our import layering rules
143 # because it wants to make repo objects from deep inside the stack
143 # because it wants to make repo objects from deep inside the stack
144 # so we manually delay the circular imports to not break
144 # so we manually delay the circular imports to not break
145 # scripts that don't use our demand-loading
145 # scripts that don't use our demand-loading
146 global hg
146 global hg
147 from . import hg as h
147 from . import hg as h
148 hg = h
148 hg = h
149
149
150 repo = ctx.repo()
150 repo = ctx.repo()
151 _auditsubrepopath(repo, path)
151 _auditsubrepopath(repo, path)
152 state = ctx.substate[path]
152 state = ctx.substate[path]
153 _checktype(repo.ui, state[2])
153 _checktype(repo.ui, state[2])
154 if allowwdir:
154 if allowwdir:
155 state = (state[0], ctx.subrev(path), state[2])
155 state = (state[0], ctx.subrev(path), state[2])
156 return types[state[2]](ctx, path, state[:2], allowcreate)
156 return types[state[2]](ctx, path, state[:2], allowcreate)
157
157
158 def nullsubrepo(ctx, path, pctx):
158 def nullsubrepo(ctx, path, pctx):
159 """return an empty subrepo in pctx for the extant subrepo in ctx"""
159 """return an empty subrepo in pctx for the extant subrepo in ctx"""
160 # subrepo inherently violates our import layering rules
160 # subrepo inherently violates our import layering rules
161 # because it wants to make repo objects from deep inside the stack
161 # because it wants to make repo objects from deep inside the stack
162 # so we manually delay the circular imports to not break
162 # so we manually delay the circular imports to not break
163 # scripts that don't use our demand-loading
163 # scripts that don't use our demand-loading
164 global hg
164 global hg
165 from . import hg as h
165 from . import hg as h
166 hg = h
166 hg = h
167
167
168 repo = ctx.repo()
168 repo = ctx.repo()
169 _auditsubrepopath(repo, path)
169 _auditsubrepopath(repo, path)
170 state = ctx.substate[path]
170 state = ctx.substate[path]
171 _checktype(repo.ui, state[2])
171 _checktype(repo.ui, state[2])
172 subrev = ''
172 subrev = ''
173 if state[2] == 'hg':
173 if state[2] == 'hg':
174 subrev = "0" * 40
174 subrev = "0" * 40
175 return types[state[2]](pctx, path, (state[0], subrev), True)
175 return types[state[2]](pctx, path, (state[0], subrev), True)
176
176
177 # subrepo classes need to implement the following abstract class:
177 # subrepo classes need to implement the following abstract class:
178
178
179 class abstractsubrepo(object):
179 class abstractsubrepo(object):
180
180
181 def __init__(self, ctx, path):
181 def __init__(self, ctx, path):
182 """Initialize abstractsubrepo part
182 """Initialize abstractsubrepo part
183
183
184 ``ctx`` is the context referring this subrepository in the
184 ``ctx`` is the context referring this subrepository in the
185 parent repository.
185 parent repository.
186
186
187 ``path`` is the path to this subrepository as seen from
187 ``path`` is the path to this subrepository as seen from
188 innermost repository.
188 innermost repository.
189 """
189 """
190 self.ui = ctx.repo().ui
190 self.ui = ctx.repo().ui
191 self._ctx = ctx
191 self._ctx = ctx
192 self._path = path
192 self._path = path
193
193
194 def addwebdirpath(self, serverpath, webconf):
194 def addwebdirpath(self, serverpath, webconf):
195 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
195 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
196
196
197 ``serverpath`` is the path component of the URL for this repo.
197 ``serverpath`` is the path component of the URL for this repo.
198
198
199 ``webconf`` is the dictionary of hgwebdir entries.
199 ``webconf`` is the dictionary of hgwebdir entries.
200 """
200 """
201 pass
201 pass
202
202
203 def storeclean(self, path):
203 def storeclean(self, path):
204 """
204 """
205 returns true if the repository has not changed since it was last
205 returns true if the repository has not changed since it was last
206 cloned from or pushed to a given repository.
206 cloned from or pushed to a given repository.
207 """
207 """
208 return False
208 return False
209
209
210 def dirty(self, ignoreupdate=False, missing=False):
210 def dirty(self, ignoreupdate=False, missing=False):
211 """returns true if the dirstate of the subrepo is dirty or does not
211 """returns true if the dirstate of the subrepo is dirty or does not
212 match current stored state. If ignoreupdate is true, only check
212 match current stored state. If ignoreupdate is true, only check
213 whether the subrepo has uncommitted changes in its dirstate. If missing
213 whether the subrepo has uncommitted changes in its dirstate. If missing
214 is true, check for deleted files.
214 is true, check for deleted files.
215 """
215 """
216 raise NotImplementedError
216 raise NotImplementedError
217
217
218 def dirtyreason(self, ignoreupdate=False, missing=False):
218 def dirtyreason(self, ignoreupdate=False, missing=False):
219 """return reason string if it is ``dirty()``
219 """return reason string if it is ``dirty()``
220
220
221 Returned string should have enough information for the message
221 The returned string should carry enough information to serve as the
221 The returned string should carry enough information to serve as the
222 message of an exception.
222 message of an exception.
223
223
224 Otherwise, this returns None.
224 Otherwise, this returns None.
225 """
226 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
226 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
227 return _('uncommitted changes in subrepository "%s"'
227 return _('uncommitted changes in subrepository "%s"'
228 ) % subrelpath(self)
228 ) % subrelpath(self)
229
229
230 def bailifchanged(self, ignoreupdate=False, hint=None):
230 def bailifchanged(self, ignoreupdate=False, hint=None):
231 """raise Abort if subrepository is ``dirty()``
231 """raise Abort if subrepository is ``dirty()``
232 """
232 """
233 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
233 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
234 missing=True)
234 missing=True)
235 if dirtyreason:
235 if dirtyreason:
236 raise error.Abort(dirtyreason, hint=hint)
236 raise error.Abort(dirtyreason, hint=hint)
237
237
238 def basestate(self):
238 def basestate(self):
239 """current working directory base state, disregarding .hgsubstate
239 """current working directory base state, disregarding .hgsubstate
240 state and working directory modifications"""
240 state and working directory modifications"""
241 raise NotImplementedError
241 raise NotImplementedError
242
242
243 def checknested(self, path):
243 def checknested(self, path):
244 """check if path is a subrepository within this repository"""
244 """check if path is a subrepository within this repository"""
245 return False
245 return False
246
246
247 def commit(self, text, user, date):
247 def commit(self, text, user, date):
248 """commit the current changes to the subrepo with the given
248 """commit the current changes to the subrepo with the given
249 log message. Use given user and date if possible. Return the
249 log message. Use given user and date if possible. Return the
250 new state of the subrepo.
250 new state of the subrepo.
251 """
251 """
252 raise NotImplementedError
252 raise NotImplementedError
253
253
254 def phase(self, state):
254 def phase(self, state):
255 """returns phase of specified state in the subrepository.
255 """returns phase of specified state in the subrepository.
256 """
256 """
257 return phases.public
257 return phases.public
258
258
259 def remove(self):
259 def remove(self):
260 """remove the subrepo
260 """remove the subrepo
261
261
262 (should verify the dirstate is not dirty first)
262 (should verify the dirstate is not dirty first)
263 """
263 """
264 raise NotImplementedError
264 raise NotImplementedError
265
265
266 def get(self, state, overwrite=False):
266 def get(self, state, overwrite=False):
267 """run whatever commands are needed to put the subrepo into
267 """run whatever commands are needed to put the subrepo into
268 this state
268 this state
269 """
269 """
270 raise NotImplementedError
270 raise NotImplementedError
271
271
272 def merge(self, state):
272 def merge(self, state):
273 """merge currently-saved state with the new state."""
273 """merge currently-saved state with the new state."""
274 raise NotImplementedError
274 raise NotImplementedError
275
275
276 def push(self, opts):
276 def push(self, opts):
277 """perform whatever action is analogous to 'hg push'
277 """perform whatever action is analogous to 'hg push'
278
278
279 This may be a no-op on some systems.
279 This may be a no-op on some systems.
280 """
280 """
281 raise NotImplementedError
281 raise NotImplementedError
282
282
283 def add(self, ui, match, prefix, explicitonly, **opts):
283 def add(self, ui, match, prefix, explicitonly, **opts):
284 return []
284 return []
285
285
286 def addremove(self, matcher, prefix, opts, dry_run, similarity):
286 def addremove(self, matcher, prefix, opts, dry_run, similarity):
287 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
287 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
288 return 1
288 return 1
289
289
290 def cat(self, match, fm, fntemplate, prefix, **opts):
290 def cat(self, match, fm, fntemplate, prefix, **opts):
291 return 1
291 return 1
292
292
293 def status(self, rev2, **opts):
293 def status(self, rev2, **opts):
294 return scmutil.status([], [], [], [], [], [], [])
294 return scmutil.status([], [], [], [], [], [], [])
295
295
296 def diff(self, ui, diffopts, node2, match, prefix, **opts):
296 def diff(self, ui, diffopts, node2, match, prefix, **opts):
297 pass
297 pass
298
298
299 def outgoing(self, ui, dest, opts):
299 def outgoing(self, ui, dest, opts):
300 return 1
300 return 1
301
301
302 def incoming(self, ui, source, opts):
302 def incoming(self, ui, source, opts):
303 return 1
303 return 1
304
304
305 def files(self):
305 def files(self):
306 """return filename iterator"""
306 """return filename iterator"""
307 raise NotImplementedError
307 raise NotImplementedError
308
308
309 def filedata(self, name, decode):
309 def filedata(self, name, decode):
310 """return file data, optionally passed through repo decoders"""
310 """return file data, optionally passed through repo decoders"""
311 raise NotImplementedError
311 raise NotImplementedError
312
312
313 def fileflags(self, name):
313 def fileflags(self, name):
314 """return file flags"""
314 """return file flags"""
315 return ''
315 return ''
316
316
317 def getfileset(self, expr):
317 def getfileset(self, expr):
318 """Resolve the fileset expression for this repo"""
318 """Resolve the fileset expression for this repo"""
319 return set()
319 return set()
320
320
321 def printfiles(self, ui, m, fm, fmt, subrepos):
321 def printfiles(self, ui, m, fm, fmt, subrepos):
322 """handle the files command for this subrepo"""
322 """handle the files command for this subrepo"""
323 return 1
323 return 1
324
324
325 def archive(self, archiver, prefix, match=None, decode=True):
325 def archive(self, archiver, prefix, match=None, decode=True):
326 if match is not None:
326 if match is not None:
327 files = [f for f in self.files() if match(f)]
327 files = [f for f in self.files() if match(f)]
328 else:
328 else:
329 files = self.files()
329 files = self.files()
330 total = len(files)
330 total = len(files)
331 relpath = subrelpath(self)
331 relpath = subrelpath(self)
332 self.ui.progress(_('archiving (%s)') % relpath, 0,
332 self.ui.progress(_('archiving (%s)') % relpath, 0,
333 unit=_('files'), total=total)
333 unit=_('files'), total=total)
334 for i, name in enumerate(files):
334 for i, name in enumerate(files):
335 flags = self.fileflags(name)
335 flags = self.fileflags(name)
336 mode = 'x' in flags and 0o755 or 0o644
336 mode = 'x' in flags and 0o755 or 0o644
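            # 0o755 for files carrying the executable flag, 0o644 otherwise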
337 symlink = 'l' in flags
337 symlink = 'l' in flags
338 archiver.addfile(prefix + self._path + '/' + name,
338 archiver.addfile(prefix + self._path + '/' + name,
339 mode, symlink, self.filedata(name, decode))
339 mode, symlink, self.filedata(name, decode))
340 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
340 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
341 unit=_('files'), total=total)
341 unit=_('files'), total=total)
342 self.ui.progress(_('archiving (%s)') % relpath, None)
342 self.ui.progress(_('archiving (%s)') % relpath, None)
343 return total
343 return total
344
344
345 def walk(self, match):
345 def walk(self, match):
346 '''
346 '''
347 walk recursively through the directory tree, finding all files
347 walk recursively through the directory tree, finding all files
348 matched by the match function
348 matched by the match function
349 '''
349 '''
350
350
351 def forget(self, match, prefix):
351 def forget(self, match, prefix):
352 return ([], [])
352 return ([], [])
353
353
354 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
354 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
355 """remove the matched files from the subrepository and the filesystem,
355 """remove the matched files from the subrepository and the filesystem,
356 possibly by force and/or after the file has been removed from the
356 possibly by force and/or after the file has been removed from the
357 filesystem. Return 0 on success, 1 on any warning.
357 filesystem. Return 0 on success, 1 on any warning.
358 """
358 """
359 warnings.append(_("warning: removefiles not implemented (%s)")
359 warnings.append(_("warning: removefiles not implemented (%s)")
360 % self._path)
360 % self._path)
361 return 1
361 return 1
362
362
363 def revert(self, substate, *pats, **opts):
363 def revert(self, substate, *pats, **opts):
364 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
364 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
365 % (substate[0], substate[2]))
365 % (substate[0], substate[2]))
366 return []
366 return []
367
367
368 def shortid(self, revid):
368 def shortid(self, revid):
369 return revid
369 return revid
370
370
371 def unshare(self):
371 def unshare(self):
372 '''
372 '''
373 convert this repository from shared to normal storage.
373 convert this repository from shared to normal storage.
374 '''
374 '''
375
375
376 def verify(self):
376 def verify(self):
377 '''verify the integrity of the repository. Return 0 on success or
377 '''verify the integrity of the repository. Return 0 on success or
378 warning, 1 on any error.
378 warning, 1 on any error.
379 '''
379 '''
380 return 0
380 return 0
381
381
382 @propertycache
382 @propertycache
383 def wvfs(self):
383 def wvfs(self):
384 """return vfs to access the working directory of this subrepository
384 """return vfs to access the working directory of this subrepository
385 """
385 """
386 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
386 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
387
387
388 @propertycache
388 @propertycache
389 def _relpath(self):
389 def _relpath(self):
390 """return path to this subrepository as seen from outermost repository
390 """return path to this subrepository as seen from outermost repository
391 """
391 """
392 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
392 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
393
393
394 class hgsubrepo(abstractsubrepo):
394 class hgsubrepo(abstractsubrepo):
395 def __init__(self, ctx, path, state, allowcreate):
395 def __init__(self, ctx, path, state, allowcreate):
396 super(hgsubrepo, self).__init__(ctx, path)
396 super(hgsubrepo, self).__init__(ctx, path)
397 self._state = state
397 self._state = state
398 r = ctx.repo()
398 r = ctx.repo()
399 root = r.wjoin(path)
399 root = r.wjoin(path)
400 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
400 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
401 self._repo = hg.repository(r.baseui, root, create=create)
401 self._repo = hg.repository(r.baseui, root, create=create)
402
402
403 # Propagate the parent's --hidden option
403 # Propagate the parent's --hidden option
404 if r is r.unfiltered():
404 if r is r.unfiltered():
405 self._repo = self._repo.unfiltered()
405 self._repo = self._repo.unfiltered()
406
406
407 self.ui = self._repo.ui
407 self.ui = self._repo.ui
408 for s, k in [('ui', 'commitsubrepos')]:
408 for s, k in [('ui', 'commitsubrepos')]:
409 v = r.ui.config(s, k)
409 v = r.ui.config(s, k)
410 if v:
410 if v:
411 self.ui.setconfig(s, k, v, 'subrepo')
411 self.ui.setconfig(s, k, v, 'subrepo')
412 # internal config: ui._usedassubrepo
412 # internal config: ui._usedassubrepo
413 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
413 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
414 self._initrepo(r, state[0], create)
414 self._initrepo(r, state[0], create)
415
415
416 @annotatesubrepoerror
416 @annotatesubrepoerror
417 def addwebdirpath(self, serverpath, webconf):
417 def addwebdirpath(self, serverpath, webconf):
418 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
418 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
419
419
420 def storeclean(self, path):
420 def storeclean(self, path):
421 with self._repo.lock():
421 with self._repo.lock():
422 return self._storeclean(path)
422 return self._storeclean(path)
423
423
424 def _storeclean(self, path):
424 def _storeclean(self, path):
425 clean = True
425 clean = True
426 itercache = self._calcstorehash(path)
426 itercache = self._calcstorehash(path)
427 for filehash in self._readstorehashcache(path):
427 for filehash in self._readstorehashcache(path):
428 if filehash != next(itercache, None):
428 if filehash != next(itercache, None):
429 clean = False
429 clean = False
430 break
430 break
431 if clean:
431 if clean:
432 # if the iterator is not yet exhausted, the cached and current
432 # if the iterator is not yet exhausted, the cached and current
433 # store hashes have a different number of entries
433 # store hashes have a different number of entries
434 clean = next(itercache, None) is None
434 clean = next(itercache, None) is None
435 return clean
435 return clean
436
436
437 def _calcstorehash(self, remotepath):
437 def _calcstorehash(self, remotepath):
438 '''calculate a unique "store hash"
438 '''calculate a unique "store hash"
439
439
440 This method is used to detect when there are changes that may
440 This method is used to detect when there are changes that may
441 require a push to a given remote path.'''
441 require a push to a given remote path.'''
442 # sort the files that will be hashed in increasing (likely) file size
442 # sort the files that will be hashed in increasing (likely) file size
443 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
443 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
444 yield '# %s\n' % _expandedabspath(remotepath)
444 yield '# %s\n' % _expandedabspath(remotepath)
445 vfs = self._repo.vfs
445 vfs = self._repo.vfs
446 for relname in filelist:
446 for relname in filelist:
447 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
447 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
448 yield '%s = %s\n' % (relname, filehash)
448 yield '%s = %s\n' % (relname, filehash)
449
449
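    # For reference, the generator above yields a header line of the form
    # "# <expanded remote path>" followed by one "<relname> = <sha1 hex>" line
    # per file in 'filelist'; _readstorehashcache() and _cachestorehash() below
    # persist and reread exactly these lines, so comparing the cached and
    # freshly computed streams is enough to tell whether the store has changed
    # since the last exchange with that remote.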
450 @propertycache
450 @propertycache
451 def _cachestorehashvfs(self):
451 def _cachestorehashvfs(self):
452 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
452 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
453
453
454 def _readstorehashcache(self, remotepath):
454 def _readstorehashcache(self, remotepath):
455 '''read the store hash cache for a given remote repository'''
455 '''read the store hash cache for a given remote repository'''
456 cachefile = _getstorehashcachename(remotepath)
456 cachefile = _getstorehashcachename(remotepath)
457 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
457 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
458
458
459 def _cachestorehash(self, remotepath):
459 def _cachestorehash(self, remotepath):
460 '''cache the current store hash
460 '''cache the current store hash
461
461
462 Each remote repo requires its own store hash cache, because a subrepo
462 Each remote repo requires its own store hash cache, because a subrepo
463 store may be "clean" versus a given remote repo, but not versus another.
463 store may be "clean" versus a given remote repo, but not versus another.
464 '''
464 '''
465 cachefile = _getstorehashcachename(remotepath)
465 cachefile = _getstorehashcachename(remotepath)
466 with self._repo.lock():
466 with self._repo.lock():
467 storehash = list(self._calcstorehash(remotepath))
467 storehash = list(self._calcstorehash(remotepath))
468 vfs = self._cachestorehashvfs
468 vfs = self._cachestorehashvfs
469 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
469 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
470
470
471 def _getctx(self):
471 def _getctx(self):
472 '''fetch the context for this subrepo revision, possibly a workingctx
472 '''fetch the context for this subrepo revision, possibly a workingctx
473 '''
473 '''
474 if self._ctx.rev() is None:
474 if self._ctx.rev() is None:
475 return self._repo[None] # workingctx if parent is workingctx
475 return self._repo[None] # workingctx if parent is workingctx
476 else:
476 else:
477 rev = self._state[1]
477 rev = self._state[1]
478 return self._repo[rev]
478 return self._repo[rev]
479
479
480 @annotatesubrepoerror
480 @annotatesubrepoerror
481 def _initrepo(self, parentrepo, source, create):
481 def _initrepo(self, parentrepo, source, create):
482 self._repo._subparent = parentrepo
482 self._repo._subparent = parentrepo
483 self._repo._subsource = source
483 self._repo._subsource = source
484
484
485 if create:
485 if create:
486 lines = ['[paths]\n']
486 lines = ['[paths]\n']
487
487
488 def addpathconfig(key, value):
488 def addpathconfig(key, value):
489 if value:
489 if value:
490 lines.append('%s = %s\n' % (key, value))
490 lines.append('%s = %s\n' % (key, value))
491 self.ui.setconfig('paths', key, value, 'subrepo')
491 self.ui.setconfig('paths', key, value, 'subrepo')
492
492
493 defpath = _abssource(self._repo, abort=False)
493 defpath = _abssource(self._repo, abort=False)
494 defpushpath = _abssource(self._repo, True, abort=False)
494 defpushpath = _abssource(self._repo, True, abort=False)
495 addpathconfig('default', defpath)
495 addpathconfig('default', defpath)
496 if defpath != defpushpath:
496 if defpath != defpushpath:
497 addpathconfig('default-push', defpushpath)
497 addpathconfig('default-push', defpushpath)
498
498
499 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
499 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
500
500
501 @annotatesubrepoerror
501 @annotatesubrepoerror
502 def add(self, ui, match, prefix, explicitonly, **opts):
502 def add(self, ui, match, prefix, explicitonly, **opts):
503 return cmdutil.add(ui, self._repo, match,
503 return cmdutil.add(ui, self._repo, match,
504 self.wvfs.reljoin(prefix, self._path),
504 self.wvfs.reljoin(prefix, self._path),
505 explicitonly, **opts)
505 explicitonly, **opts)
506
506
507 @annotatesubrepoerror
507 @annotatesubrepoerror
508 def addremove(self, m, prefix, opts, dry_run, similarity):
508 def addremove(self, m, prefix, opts, dry_run, similarity):
509 # In the same way as sub directories are processed, once in a subrepo,
509 # In the same way as subdirectories are processed, once in a subrepo,
509 # In the same way as subdirectories are processed, once in a subrepo,
510 # always enter any of its subrepos. Don't corrupt the options that will
510 # always enter any of its subrepos. Don't corrupt the options that will
511 # be used to process sibling subrepos however.
512 opts = copy.copy(opts)
512 opts = copy.copy(opts)
513 opts['subrepos'] = True
513 opts['subrepos'] = True
514 return scmutil.addremove(self._repo, m,
514 return scmutil.addremove(self._repo, m,
515 self.wvfs.reljoin(prefix, self._path), opts,
515 self.wvfs.reljoin(prefix, self._path), opts,
516 dry_run, similarity)
516 dry_run, similarity)
517
517
518 @annotatesubrepoerror
518 @annotatesubrepoerror
519 def cat(self, match, fm, fntemplate, prefix, **opts):
519 def cat(self, match, fm, fntemplate, prefix, **opts):
520 rev = self._state[1]
520 rev = self._state[1]
521 ctx = self._repo[rev]
521 ctx = self._repo[rev]
522 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
522 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
523 prefix, **opts)
523 prefix, **opts)
524
524
525 @annotatesubrepoerror
525 @annotatesubrepoerror
526 def status(self, rev2, **opts):
526 def status(self, rev2, **opts):
527 try:
527 try:
528 rev1 = self._state[1]
528 rev1 = self._state[1]
529 ctx1 = self._repo[rev1]
529 ctx1 = self._repo[rev1]
530 ctx2 = self._repo[rev2]
530 ctx2 = self._repo[rev2]
531 return self._repo.status(ctx1, ctx2, **opts)
531 return self._repo.status(ctx1, ctx2, **opts)
532 except error.RepoLookupError as inst:
532 except error.RepoLookupError as inst:
533 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
533 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
534 % (inst, subrelpath(self)))
534 % (inst, subrelpath(self)))
535 return scmutil.status([], [], [], [], [], [], [])
535 return scmutil.status([], [], [], [], [], [], [])
536
536
537 @annotatesubrepoerror
537 @annotatesubrepoerror
538 def diff(self, ui, diffopts, node2, match, prefix, **opts):
538 def diff(self, ui, diffopts, node2, match, prefix, **opts):
539 try:
539 try:
540 node1 = node.bin(self._state[1])
540 node1 = node.bin(self._state[1])
541 # We currently expect node2 to come from substate and be
541 # We currently expect node2 to come from substate and be
542 # in hex format
542 # in hex format
543 if node2 is not None:
543 if node2 is not None:
544 node2 = node.bin(node2)
544 node2 = node.bin(node2)
545 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
545 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
546 node1, node2, match,
546 node1, node2, match,
547 prefix=posixpath.join(prefix, self._path),
547 prefix=posixpath.join(prefix, self._path),
548 listsubrepos=True, **opts)
548 listsubrepos=True, **opts)
549 except error.RepoLookupError as inst:
549 except error.RepoLookupError as inst:
550 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
550 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
551 % (inst, subrelpath(self)))
551 % (inst, subrelpath(self)))
552
552
553 @annotatesubrepoerror
553 @annotatesubrepoerror
554 def archive(self, archiver, prefix, match=None, decode=True):
554 def archive(self, archiver, prefix, match=None, decode=True):
555 self._get(self._state + ('hg',))
555 self._get(self._state + ('hg',))
556 files = self.files()
556 files = self.files()
557 if match:
557 if match:
558 files = [f for f in files if match(f)]
558 files = [f for f in files if match(f)]
559 rev = self._state[1]
559 rev = self._state[1]
560 ctx = self._repo[rev]
560 ctx = self._repo[rev]
561 scmutil.fileprefetchhooks(self._repo, ctx, files)
561 scmutil.fileprefetchhooks(self._repo, ctx, files)
562 total = abstractsubrepo.archive(self, archiver, prefix, match)
562 total = abstractsubrepo.archive(self, archiver, prefix, match)
563 for subpath in ctx.substate:
563 for subpath in ctx.substate:
564 s = subrepo(ctx, subpath, True)
564 s = subrepo(ctx, subpath, True)
565 submatch = matchmod.subdirmatcher(subpath, match)
565 submatch = matchmod.subdirmatcher(subpath, match)
566 total += s.archive(archiver, prefix + self._path + '/', submatch,
566 total += s.archive(archiver, prefix + self._path + '/', submatch,
567 decode)
567 decode)
568 return total
568 return total
569
569
570 @annotatesubrepoerror
570 @annotatesubrepoerror
571 def dirty(self, ignoreupdate=False, missing=False):
571 def dirty(self, ignoreupdate=False, missing=False):
572 r = self._state[1]
572 r = self._state[1]
573 if r == '' and not ignoreupdate: # no state recorded
573 if r == '' and not ignoreupdate: # no state recorded
574 return True
574 return True
575 w = self._repo[None]
575 w = self._repo[None]
576 if r != w.p1().hex() and not ignoreupdate:
576 if r != w.p1().hex() and not ignoreupdate:
577 # different version checked out
577 # different version checked out
578 return True
578 return True
579 return w.dirty(missing=missing) # working directory changed
579 return w.dirty(missing=missing) # working directory changed
580
580
581 def basestate(self):
581 def basestate(self):
582 return self._repo['.'].hex()
582 return self._repo['.'].hex()
583
583
584 def checknested(self, path):
584 def checknested(self, path):
585 return self._repo._checknested(self._repo.wjoin(path))
585 return self._repo._checknested(self._repo.wjoin(path))
586
586
587 @annotatesubrepoerror
587 @annotatesubrepoerror
588 def commit(self, text, user, date):
588 def commit(self, text, user, date):
589 # don't bother committing in the subrepo if it's only been
589 # don't bother committing in the subrepo if it's only been
590 # updated
590 # updated
591 if not self.dirty(True):
591 if not self.dirty(True):
592 return self._repo['.'].hex()
592 return self._repo['.'].hex()
593 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
593 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
594 n = self._repo.commit(text, user, date)
594 n = self._repo.commit(text, user, date)
595 if not n:
595 if not n:
596 return self._repo['.'].hex() # different version checked out
596 return self._repo['.'].hex() # different version checked out
597 return node.hex(n)
597 return node.hex(n)
598
598
599 @annotatesubrepoerror
599 @annotatesubrepoerror
600 def phase(self, state):
600 def phase(self, state):
601 return self._repo[state].phase()
601 return self._repo[state].phase()
602
602
603 @annotatesubrepoerror
603 @annotatesubrepoerror
604 def remove(self):
604 def remove(self):
605 # we can't fully delete the repository as it may contain
605 # we can't fully delete the repository as it may contain
606 # local-only history
606 # local-only history
607 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
607 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
608 hg.clean(self._repo, node.nullid, False)
608 hg.clean(self._repo, node.nullid, False)
609
609
610 def _get(self, state):
610 def _get(self, state):
611 source, revision, kind = state
611 source, revision, kind = state
612 parentrepo = self._repo._subparent
612 parentrepo = self._repo._subparent
613
613
614 if revision in self._repo.unfiltered():
614 if revision in self._repo.unfiltered():
615 # Allow shared subrepos tracked at null to setup the sharedpath
615 # Allow shared subrepos tracked at null to setup the sharedpath
616 if len(self._repo) != 0 or not parentrepo.shared():
616 if len(self._repo) != 0 or not parentrepo.shared():
617 return True
617 return True
618 self._repo._subsource = source
618 self._repo._subsource = source
619 srcurl = _abssource(self._repo)
619 srcurl = _abssource(self._repo)
620 other = hg.peer(self._repo, {}, srcurl)
620 other = hg.peer(self._repo, {}, srcurl)
621 if len(self._repo) == 0:
621 if len(self._repo) == 0:
622 # use self._repo.vfs instead of self.wvfs to remove .hg only
622 # use self._repo.vfs instead of self.wvfs to remove .hg only
623 self._repo.vfs.rmtree()
623 self._repo.vfs.rmtree()
624 if parentrepo.shared():
624
625 # A remote subrepo could be shared if there is a local copy
626 # relative to the parent's share source. But clone pooling doesn't
627 # assemble the repos in a tree, so that can't be consistently done.
628 # A simpler option is for the user to configure clone pooling, and
629 # work with that.
630 if parentrepo.shared() and hg.islocal(srcurl):
625 self.ui.status(_('sharing subrepo %s from %s\n')
631 self.ui.status(_('sharing subrepo %s from %s\n')
626 % (subrelpath(self), srcurl))
632 % (subrelpath(self), srcurl))
627 shared = hg.share(self._repo._subparent.baseui,
633 shared = hg.share(self._repo._subparent.baseui,
628 other, self._repo.root,
634 other, self._repo.root,
629 update=False, bookmarks=False)
635 update=False, bookmarks=False)
630 self._repo = shared.local()
636 self._repo = shared.local()
631 else:
637 else:
638 # TODO: find a common place for this and this code in the
639 # share.py wrap of the clone command.
640 if parentrepo.shared():
641 pool = self.ui.config('share', 'pool')
642 if pool:
643 pool = util.expandpath(pool)
644
645 shareopts = {
646 'pool': pool,
647 'mode': self.ui.config('share', 'poolnaming'),
648 }
649 else:
650 shareopts = {}
651
632 self.ui.status(_('cloning subrepo %s from %s\n')
652 self.ui.status(_('cloning subrepo %s from %s\n')
633 % (subrelpath(self), srcurl))
653 % (subrelpath(self), srcurl))
634 other, cloned = hg.clone(self._repo._subparent.baseui, {},
654 other, cloned = hg.clone(self._repo._subparent.baseui, {},
635 other, self._repo.root,
655 other, self._repo.root,
636 update=False)
656 update=False, shareopts=shareopts)
637 self._repo = cloned.local()
657 self._repo = cloned.local()
638 self._initrepo(parentrepo, source, create=True)
658 self._initrepo(parentrepo, source, create=True)
639 self._cachestorehash(srcurl)
659 self._cachestorehash(srcurl)
640 else:
660 else:
641 self.ui.status(_('pulling subrepo %s from %s\n')
661 self.ui.status(_('pulling subrepo %s from %s\n')
642 % (subrelpath(self), srcurl))
662 % (subrelpath(self), srcurl))
643 cleansub = self.storeclean(srcurl)
663 cleansub = self.storeclean(srcurl)
644 exchange.pull(self._repo, other)
664 exchange.pull(self._repo, other)
645 if cleansub:
665 if cleansub:
646 # keep the repo clean after pull
666 # keep the repo clean after pull
647 self._cachestorehash(srcurl)
667 self._cachestorehash(srcurl)
648 return False
668 return False
649
669
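    # The share-pooling branch above honours the standard [share] settings; an
    # hgrc along these (illustrative) lines makes subrepo clones of a shared
    # parent land in a common pool instead of being cloned in place:
    #
    #   [share]
    #   pool = /path/to/pool
    #   poolnaming = identity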
650 @annotatesubrepoerror
670 @annotatesubrepoerror
651 def get(self, state, overwrite=False):
671 def get(self, state, overwrite=False):
652 inrepo = self._get(state)
672 inrepo = self._get(state)
653 source, revision, kind = state
673 source, revision, kind = state
654 repo = self._repo
674 repo = self._repo
655 repo.ui.debug("getting subrepo %s\n" % self._path)
675 repo.ui.debug("getting subrepo %s\n" % self._path)
656 if inrepo:
676 if inrepo:
657 urepo = repo.unfiltered()
677 urepo = repo.unfiltered()
658 ctx = urepo[revision]
678 ctx = urepo[revision]
659 if ctx.hidden():
679 if ctx.hidden():
660 urepo.ui.warn(
680 urepo.ui.warn(
661 _('revision %s in subrepository "%s" is hidden\n') \
681 _('revision %s in subrepository "%s" is hidden\n') \
662 % (revision[0:12], self._path))
682 % (revision[0:12], self._path))
663 repo = urepo
683 repo = urepo
664 hg.updaterepo(repo, revision, overwrite)
684 hg.updaterepo(repo, revision, overwrite)
665
685
666 @annotatesubrepoerror
686 @annotatesubrepoerror
667 def merge(self, state):
687 def merge(self, state):
668 self._get(state)
688 self._get(state)
669 cur = self._repo['.']
689 cur = self._repo['.']
670 dst = self._repo[state[1]]
690 dst = self._repo[state[1]]
671 anc = dst.ancestor(cur)
691 anc = dst.ancestor(cur)
672
692
673 def mergefunc():
693 def mergefunc():
674 if anc == cur and dst.branch() == cur.branch():
694 if anc == cur and dst.branch() == cur.branch():
675 self.ui.debug('updating subrepository "%s"\n'
695 self.ui.debug('updating subrepository "%s"\n'
676 % subrelpath(self))
696 % subrelpath(self))
677 hg.update(self._repo, state[1])
697 hg.update(self._repo, state[1])
678 elif anc == dst:
698 elif anc == dst:
679 self.ui.debug('skipping subrepository "%s"\n'
699 self.ui.debug('skipping subrepository "%s"\n'
680 % subrelpath(self))
700 % subrelpath(self))
681 else:
701 else:
682 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
702 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
683 hg.merge(self._repo, state[1], remind=False)
703 hg.merge(self._repo, state[1], remind=False)
684
704
685 wctx = self._repo[None]
705 wctx = self._repo[None]
686 if self.dirty():
706 if self.dirty():
687 if anc != dst:
707 if anc != dst:
688 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
708 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
689 mergefunc()
709 mergefunc()
690 else:
710 else:
691 mergefunc()
711 mergefunc()
692 else:
712 else:
693 mergefunc()
713 mergefunc()
694
714
695 @annotatesubrepoerror
715 @annotatesubrepoerror
696 def push(self, opts):
716 def push(self, opts):
697 force = opts.get('force')
717 force = opts.get('force')
698 newbranch = opts.get('new_branch')
718 newbranch = opts.get('new_branch')
699 ssh = opts.get('ssh')
719 ssh = opts.get('ssh')
700
720
701 # push subrepos depth-first for coherent ordering
721 # push subrepos depth-first for coherent ordering
702 c = self._repo['']
722 c = self._repo['']
703 subs = c.substate # only repos that are committed
723 subs = c.substate # only repos that are committed
704 for s in sorted(subs):
724 for s in sorted(subs):
705 if c.sub(s).push(opts) == 0:
725 if c.sub(s).push(opts) == 0:
706 return False
726 return False
707
727
708 dsturl = _abssource(self._repo, True)
728 dsturl = _abssource(self._repo, True)
709 if not force:
729 if not force:
710 if self.storeclean(dsturl):
730 if self.storeclean(dsturl):
711 self.ui.status(
731 self.ui.status(
712 _('no changes made to subrepo %s since last push to %s\n')
732 _('no changes made to subrepo %s since last push to %s\n')
713 % (subrelpath(self), dsturl))
733 % (subrelpath(self), dsturl))
714 return None
734 return None
715 self.ui.status(_('pushing subrepo %s to %s\n') %
735 self.ui.status(_('pushing subrepo %s to %s\n') %
716 (subrelpath(self), dsturl))
736 (subrelpath(self), dsturl))
717 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
737 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
718 res = exchange.push(self._repo, other, force, newbranch=newbranch)
738 res = exchange.push(self._repo, other, force, newbranch=newbranch)
719
739
720 # the repo is now clean
740 # the repo is now clean
721 self._cachestorehash(dsturl)
741 self._cachestorehash(dsturl)
722 return res.cgresult
742 return res.cgresult
723
743
724 @annotatesubrepoerror
744 @annotatesubrepoerror
725 def outgoing(self, ui, dest, opts):
745 def outgoing(self, ui, dest, opts):
726 if 'rev' in opts or 'branch' in opts:
746 if 'rev' in opts or 'branch' in opts:
727 opts = copy.copy(opts)
747 opts = copy.copy(opts)
728 opts.pop('rev', None)
748 opts.pop('rev', None)
729 opts.pop('branch', None)
749 opts.pop('branch', None)
730 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
750 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
731
751
732 @annotatesubrepoerror
752 @annotatesubrepoerror
733 def incoming(self, ui, source, opts):
753 def incoming(self, ui, source, opts):
734 if 'rev' in opts or 'branch' in opts:
754 if 'rev' in opts or 'branch' in opts:
735 opts = copy.copy(opts)
755 opts = copy.copy(opts)
736 opts.pop('rev', None)
756 opts.pop('rev', None)
737 opts.pop('branch', None)
757 opts.pop('branch', None)
738 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
758 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
739
759
740 @annotatesubrepoerror
760 @annotatesubrepoerror
741 def files(self):
761 def files(self):
742 rev = self._state[1]
762 rev = self._state[1]
743 ctx = self._repo[rev]
763 ctx = self._repo[rev]
744 return ctx.manifest().keys()
764 return ctx.manifest().keys()
745
765
746 def filedata(self, name, decode):
766 def filedata(self, name, decode):
747 rev = self._state[1]
767 rev = self._state[1]
748 data = self._repo[rev][name].data()
768 data = self._repo[rev][name].data()
749 if decode:
769 if decode:
750 data = self._repo.wwritedata(name, data)
770 data = self._repo.wwritedata(name, data)
751 return data
771 return data
752
772
753 def fileflags(self, name):
773 def fileflags(self, name):
754 rev = self._state[1]
774 rev = self._state[1]
755 ctx = self._repo[rev]
775 ctx = self._repo[rev]
756 return ctx.flags(name)
776 return ctx.flags(name)
757
777
758 @annotatesubrepoerror
778 @annotatesubrepoerror
759 def printfiles(self, ui, m, fm, fmt, subrepos):
779 def printfiles(self, ui, m, fm, fmt, subrepos):
760 # If the parent context is a workingctx, use the workingctx here for
780 # If the parent context is a workingctx, use the workingctx here for
761 # consistency.
781 # consistency.
762 if self._ctx.rev() is None:
782 if self._ctx.rev() is None:
763 ctx = self._repo[None]
783 ctx = self._repo[None]
764 else:
784 else:
765 rev = self._state[1]
785 rev = self._state[1]
766 ctx = self._repo[rev]
786 ctx = self._repo[rev]
767 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
787 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
768
788
769 @annotatesubrepoerror
789 @annotatesubrepoerror
770 def getfileset(self, expr):
790 def getfileset(self, expr):
771 if self._ctx.rev() is None:
791 if self._ctx.rev() is None:
772 ctx = self._repo[None]
792 ctx = self._repo[None]
773 else:
793 else:
774 rev = self._state[1]
794 rev = self._state[1]
775 ctx = self._repo[rev]
795 ctx = self._repo[rev]
776
796
777 files = ctx.getfileset(expr)
797 files = ctx.getfileset(expr)
778
798
779 for subpath in ctx.substate:
799 for subpath in ctx.substate:
780 sub = ctx.sub(subpath)
800 sub = ctx.sub(subpath)
781
801
782 try:
802 try:
783 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
803 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
784 except error.LookupError:
804 except error.LookupError:
785 self.ui.status(_("skipping missing subrepository: %s\n")
805 self.ui.status(_("skipping missing subrepository: %s\n")
786 % self.wvfs.reljoin(reporelpath(self), subpath))
806 % self.wvfs.reljoin(reporelpath(self), subpath))
787 return files
807 return files
788
808
789 def walk(self, match):
809 def walk(self, match):
790 ctx = self._repo[None]
810 ctx = self._repo[None]
791 return ctx.walk(match)
811 return ctx.walk(match)
792
812
793 @annotatesubrepoerror
813 @annotatesubrepoerror
794 def forget(self, match, prefix):
814 def forget(self, match, prefix):
795 return cmdutil.forget(self.ui, self._repo, match,
815 return cmdutil.forget(self.ui, self._repo, match,
796 self.wvfs.reljoin(prefix, self._path), True)
816 self.wvfs.reljoin(prefix, self._path), True)
797
817
798 @annotatesubrepoerror
818 @annotatesubrepoerror
799 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
819 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
800 return cmdutil.remove(self.ui, self._repo, matcher,
820 return cmdutil.remove(self.ui, self._repo, matcher,
801 self.wvfs.reljoin(prefix, self._path),
821 self.wvfs.reljoin(prefix, self._path),
802 after, force, subrepos)
822 after, force, subrepos)
803
823
804 @annotatesubrepoerror
824 @annotatesubrepoerror
805 def revert(self, substate, *pats, **opts):
825 def revert(self, substate, *pats, **opts):
806 # reverting a subrepo is a 2 step process:
826 # reverting a subrepo is a 2 step process:
 807         # 1. if the no_backup option is not set, revert all modified
 827         # 1. if the no_backup option is not set, revert all modified
808 # files inside the subrepo
828 # files inside the subrepo
809 # 2. update the subrepo to the revision specified in
829 # 2. update the subrepo to the revision specified in
810 # the corresponding substate dictionary
830 # the corresponding substate dictionary
811 self.ui.status(_('reverting subrepo %s\n') % substate[0])
831 self.ui.status(_('reverting subrepo %s\n') % substate[0])
812 if not opts.get(r'no_backup'):
832 if not opts.get(r'no_backup'):
813 # Revert all files on the subrepo, creating backups
833 # Revert all files on the subrepo, creating backups
814 # Note that this will not recursively revert subrepos
834 # Note that this will not recursively revert subrepos
815 # We could do it if there was a set:subrepos() predicate
835 # We could do it if there was a set:subrepos() predicate
816 opts = opts.copy()
836 opts = opts.copy()
817 opts[r'date'] = None
837 opts[r'date'] = None
818 opts[r'rev'] = substate[1]
838 opts[r'rev'] = substate[1]
819
839
820 self.filerevert(*pats, **opts)
840 self.filerevert(*pats, **opts)
821
841
822 # Update the repo to the revision specified in the given substate
842 # Update the repo to the revision specified in the given substate
823 if not opts.get(r'dry_run'):
843 if not opts.get(r'dry_run'):
824 self.get(substate, overwrite=True)
844 self.get(substate, overwrite=True)
825
845
826 def filerevert(self, *pats, **opts):
846 def filerevert(self, *pats, **opts):
827 ctx = self._repo[opts[r'rev']]
847 ctx = self._repo[opts[r'rev']]
828 parents = self._repo.dirstate.parents()
848 parents = self._repo.dirstate.parents()
829 if opts.get(r'all'):
849 if opts.get(r'all'):
830 pats = ['set:modified()']
850 pats = ['set:modified()']
831 else:
851 else:
832 pats = []
852 pats = []
833 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
853 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
834
854
835 def shortid(self, revid):
855 def shortid(self, revid):
836 return revid[:12]
856 return revid[:12]
837
857
838 @annotatesubrepoerror
858 @annotatesubrepoerror
839 def unshare(self):
859 def unshare(self):
840 # subrepo inherently violates our import layering rules
860 # subrepo inherently violates our import layering rules
841 # because it wants to make repo objects from deep inside the stack
861 # because it wants to make repo objects from deep inside the stack
842 # so we manually delay the circular imports to not break
862 # so we manually delay the circular imports to not break
843 # scripts that don't use our demand-loading
863 # scripts that don't use our demand-loading
844 global hg
864 global hg
845 from . import hg as h
865 from . import hg as h
846 hg = h
866 hg = h
847
867
848 # Nothing prevents a user from sharing in a repo, and then making that a
868 # Nothing prevents a user from sharing in a repo, and then making that a
849 # subrepo. Alternately, the previous unshare attempt may have failed
869 # subrepo. Alternately, the previous unshare attempt may have failed
850 # part way through. So recurse whether or not this layer is shared.
870 # part way through. So recurse whether or not this layer is shared.
851 if self._repo.shared():
871 if self._repo.shared():
852 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
872 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
853
873
854 hg.unshare(self.ui, self._repo)
874 hg.unshare(self.ui, self._repo)
855
875
856 def verify(self):
876 def verify(self):
857 try:
877 try:
858 rev = self._state[1]
878 rev = self._state[1]
859 ctx = self._repo.unfiltered()[rev]
879 ctx = self._repo.unfiltered()[rev]
860 if ctx.hidden():
880 if ctx.hidden():
861 # Since hidden revisions aren't pushed/pulled, it seems worth an
881 # Since hidden revisions aren't pushed/pulled, it seems worth an
862 # explicit warning.
882 # explicit warning.
863 ui = self._repo.ui
883 ui = self._repo.ui
864 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
884 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
865 (self._relpath, node.short(self._ctx.node())))
885 (self._relpath, node.short(self._ctx.node())))
866 return 0
886 return 0
867 except error.RepoLookupError:
887 except error.RepoLookupError:
868 # A missing subrepo revision may be a case of needing to pull it, so
888 # A missing subrepo revision may be a case of needing to pull it, so
869 # don't treat this as an error.
889 # don't treat this as an error.
870 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
890 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
871 (self._relpath, node.short(self._ctx.node())))
891 (self._relpath, node.short(self._ctx.node())))
872 return 0
892 return 0
873
893
874 @propertycache
894 @propertycache
875 def wvfs(self):
895 def wvfs(self):
876 """return own wvfs for efficiency and consistency
896 """return own wvfs for efficiency and consistency
877 """
897 """
878 return self._repo.wvfs
898 return self._repo.wvfs
879
899
880 @propertycache
900 @propertycache
881 def _relpath(self):
901 def _relpath(self):
882 """return path to this subrepository as seen from outermost repository
902 """return path to this subrepository as seen from outermost repository
883 """
903 """
884 # Keep consistent dir separators by avoiding vfs.join(self._path)
904 # Keep consistent dir separators by avoiding vfs.join(self._path)
885 return reporelpath(self._repo)
905 return reporelpath(self._repo)
886
906
887 class svnsubrepo(abstractsubrepo):
907 class svnsubrepo(abstractsubrepo):
888 def __init__(self, ctx, path, state, allowcreate):
908 def __init__(self, ctx, path, state, allowcreate):
889 super(svnsubrepo, self).__init__(ctx, path)
909 super(svnsubrepo, self).__init__(ctx, path)
890 self._state = state
910 self._state = state
891 self._exe = util.findexe('svn')
911 self._exe = util.findexe('svn')
892 if not self._exe:
912 if not self._exe:
893 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
913 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
894 % self._path)
914 % self._path)
895
915
896 def _svncommand(self, commands, filename='', failok=False):
916 def _svncommand(self, commands, filename='', failok=False):
897 cmd = [self._exe]
917 cmd = [self._exe]
898 extrakw = {}
918 extrakw = {}
899 if not self.ui.interactive():
919 if not self.ui.interactive():
900 # Making stdin be a pipe should prevent svn from behaving
920 # Making stdin be a pipe should prevent svn from behaving
901 # interactively even if we can't pass --non-interactive.
921 # interactively even if we can't pass --non-interactive.
902 extrakw[r'stdin'] = subprocess.PIPE
922 extrakw[r'stdin'] = subprocess.PIPE
903 # Starting in svn 1.5 --non-interactive is a global flag
923 # Starting in svn 1.5 --non-interactive is a global flag
904 # instead of being per-command, but we need to support 1.4 so
924 # instead of being per-command, but we need to support 1.4 so
905 # we have to be intelligent about what commands take
925 # we have to be intelligent about what commands take
906 # --non-interactive.
926 # --non-interactive.
907 if commands[0] in ('update', 'checkout', 'commit'):
927 if commands[0] in ('update', 'checkout', 'commit'):
908 cmd.append('--non-interactive')
928 cmd.append('--non-interactive')
909 cmd.extend(commands)
929 cmd.extend(commands)
910 if filename is not None:
930 if filename is not None:
911 path = self.wvfs.reljoin(self._ctx.repo().origroot,
931 path = self.wvfs.reljoin(self._ctx.repo().origroot,
912 self._path, filename)
932 self._path, filename)
913 cmd.append(path)
933 cmd.append(path)
914 env = dict(encoding.environ)
934 env = dict(encoding.environ)
915 # Avoid localized output, preserve current locale for everything else.
935 # Avoid localized output, preserve current locale for everything else.
916 lc_all = env.get('LC_ALL')
936 lc_all = env.get('LC_ALL')
917 if lc_all:
937 if lc_all:
918 env['LANG'] = lc_all
938 env['LANG'] = lc_all
919 del env['LC_ALL']
939 del env['LC_ALL']
920 env['LC_MESSAGES'] = 'C'
940 env['LC_MESSAGES'] = 'C'
921 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
941 p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
922 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
942 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
923 universal_newlines=True, env=env, **extrakw)
943 universal_newlines=True, env=env, **extrakw)
924 stdout, stderr = p.communicate()
944 stdout, stderr = p.communicate()
925 stderr = stderr.strip()
945 stderr = stderr.strip()
926 if not failok:
946 if not failok:
927 if p.returncode:
947 if p.returncode:
928 raise error.Abort(stderr or 'exited with code %d'
948 raise error.Abort(stderr or 'exited with code %d'
929 % p.returncode)
949 % p.returncode)
930 if stderr:
950 if stderr:
931 self.ui.warn(stderr + '\n')
951 self.ui.warn(stderr + '\n')
932 return stdout, stderr
952 return stdout, stderr
933
953
934 @propertycache
954 @propertycache
935 def _svnversion(self):
955 def _svnversion(self):
936 output, err = self._svncommand(['--version', '--quiet'], filename=None)
956 output, err = self._svncommand(['--version', '--quiet'], filename=None)
937 m = re.search(br'^(\d+)\.(\d+)', output)
957 m = re.search(br'^(\d+)\.(\d+)', output)
938 if not m:
958 if not m:
939 raise error.Abort(_('cannot retrieve svn tool version'))
959 raise error.Abort(_('cannot retrieve svn tool version'))
940 return (int(m.group(1)), int(m.group(2)))
960 return (int(m.group(1)), int(m.group(2)))
941
961
942 def _svnmissing(self):
962 def _svnmissing(self):
943 return not self.wvfs.exists('.svn')
963 return not self.wvfs.exists('.svn')
944
964
945 def _wcrevs(self):
965 def _wcrevs(self):
946 # Get the working directory revision as well as the last
966 # Get the working directory revision as well as the last
947 # commit revision so we can compare the subrepo state with
967 # commit revision so we can compare the subrepo state with
948 # both. We used to store the working directory one.
968 # both. We used to store the working directory one.
949 output, err = self._svncommand(['info', '--xml'])
969 output, err = self._svncommand(['info', '--xml'])
950 doc = xml.dom.minidom.parseString(output)
970 doc = xml.dom.minidom.parseString(output)
951 entries = doc.getElementsByTagName('entry')
971 entries = doc.getElementsByTagName('entry')
952 lastrev, rev = '0', '0'
972 lastrev, rev = '0', '0'
953 if entries:
973 if entries:
954 rev = str(entries[0].getAttribute('revision')) or '0'
974 rev = str(entries[0].getAttribute('revision')) or '0'
955 commits = entries[0].getElementsByTagName('commit')
975 commits = entries[0].getElementsByTagName('commit')
956 if commits:
976 if commits:
957 lastrev = str(commits[0].getAttribute('revision')) or '0'
977 lastrev = str(commits[0].getAttribute('revision')) or '0'
958 return (lastrev, rev)
978 return (lastrev, rev)
959
979
960 def _wcrev(self):
980 def _wcrev(self):
961 return self._wcrevs()[0]
981 return self._wcrevs()[0]
962
982
963 def _wcchanged(self):
983 def _wcchanged(self):
964 """Return (changes, extchanges, missing) where changes is True
984 """Return (changes, extchanges, missing) where changes is True
965 if the working directory was changed, extchanges is
985 if the working directory was changed, extchanges is
966 True if any of these changes concern an external entry and missing
986 True if any of these changes concern an external entry and missing
967 is True if any change is a missing entry.
987 is True if any change is a missing entry.
968 """
988 """
969 output, err = self._svncommand(['status', '--xml'])
989 output, err = self._svncommand(['status', '--xml'])
970 externals, changes, missing = [], [], []
990 externals, changes, missing = [], [], []
971 doc = xml.dom.minidom.parseString(output)
991 doc = xml.dom.minidom.parseString(output)
972 for e in doc.getElementsByTagName('entry'):
992 for e in doc.getElementsByTagName('entry'):
973 s = e.getElementsByTagName('wc-status')
993 s = e.getElementsByTagName('wc-status')
974 if not s:
994 if not s:
975 continue
995 continue
976 item = s[0].getAttribute('item')
996 item = s[0].getAttribute('item')
977 props = s[0].getAttribute('props')
997 props = s[0].getAttribute('props')
978 path = e.getAttribute('path')
998 path = e.getAttribute('path')
979 if item == 'external':
999 if item == 'external':
980 externals.append(path)
1000 externals.append(path)
981 elif item == 'missing':
1001 elif item == 'missing':
982 missing.append(path)
1002 missing.append(path)
983 if (item not in ('', 'normal', 'unversioned', 'external')
1003 if (item not in ('', 'normal', 'unversioned', 'external')
984 or props not in ('', 'none', 'normal')):
1004 or props not in ('', 'none', 'normal')):
985 changes.append(path)
1005 changes.append(path)
986 for path in changes:
1006 for path in changes:
987 for ext in externals:
1007 for ext in externals:
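                 # changes at or below an external's path count as external changes
                 # changes at or below an external's path count as external changes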
988 if path == ext or path.startswith(ext + pycompat.ossep):
1008 if path == ext or path.startswith(ext + pycompat.ossep):
989 return True, True, bool(missing)
1009 return True, True, bool(missing)
990 return bool(changes), False, bool(missing)
1010 return bool(changes), False, bool(missing)
991
1011
992 @annotatesubrepoerror
1012 @annotatesubrepoerror
993 def dirty(self, ignoreupdate=False, missing=False):
1013 def dirty(self, ignoreupdate=False, missing=False):
994 if self._svnmissing():
1014 if self._svnmissing():
995 return self._state[1] != ''
1015 return self._state[1] != ''
996 wcchanged = self._wcchanged()
1016 wcchanged = self._wcchanged()
997 changed = wcchanged[0] or (missing and wcchanged[2])
1017 changed = wcchanged[0] or (missing and wcchanged[2])
998 if not changed:
1018 if not changed:
999 if self._state[1] in self._wcrevs() or ignoreupdate:
1019 if self._state[1] in self._wcrevs() or ignoreupdate:
1000 return False
1020 return False
1001 return True
1021 return True
1002
1022
1003 def basestate(self):
1023 def basestate(self):
1004 lastrev, rev = self._wcrevs()
1024 lastrev, rev = self._wcrevs()
1005 if lastrev != rev:
1025 if lastrev != rev:
1006             # Last committed rev is not the same as rev. We would
1026             # Last committed rev is not the same as rev. We would
1007             # like to take lastrev but we do not know if the subrepo
1027             # like to take lastrev but we do not know if the subrepo
1008             # URL exists at lastrev. Test it and fall back to rev if it
1028             # URL exists at lastrev. Test it and fall back to rev if it
1009             # is not there.
1029             # is not there.
1010 try:
1030 try:
1011 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1031 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1012 return lastrev
1032 return lastrev
1013 except error.Abort:
1033 except error.Abort:
1014 pass
1034 pass
1015 return rev
1035 return rev
1016
1036
1017 @annotatesubrepoerror
1037 @annotatesubrepoerror
1018 def commit(self, text, user, date):
1038 def commit(self, text, user, date):
1019 # user and date are out of our hands since svn is centralized
1039 # user and date are out of our hands since svn is centralized
1020 changed, extchanged, missing = self._wcchanged()
1040 changed, extchanged, missing = self._wcchanged()
1021 if not changed:
1041 if not changed:
1022 return self.basestate()
1042 return self.basestate()
1023 if extchanged:
1043 if extchanged:
1024 # Do not try to commit externals
1044 # Do not try to commit externals
1025 raise error.Abort(_('cannot commit svn externals'))
1045 raise error.Abort(_('cannot commit svn externals'))
1026 if missing:
1046 if missing:
1027 # svn can commit with missing entries but aborting like hg
1047 # svn can commit with missing entries but aborting like hg
1028 # seems a better approach.
1048 # seems a better approach.
1029 raise error.Abort(_('cannot commit missing svn entries'))
1049 raise error.Abort(_('cannot commit missing svn entries'))
1030 commitinfo, err = self._svncommand(['commit', '-m', text])
1050 commitinfo, err = self._svncommand(['commit', '-m', text])
1031 self.ui.status(commitinfo)
1051 self.ui.status(commitinfo)
1032 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1052 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1033 if not newrev:
1053 if not newrev:
1034 if not commitinfo.strip():
1054 if not commitinfo.strip():
1035 # Sometimes, our definition of "changed" differs from
1055 # Sometimes, our definition of "changed" differs from
1036                 # svn's. For instance, svn ignores missing files
1056                 # svn's. For instance, svn ignores missing files
1037 # when committing. If there are only missing files, no
1057 # when committing. If there are only missing files, no
1038 # commit is made, no output and no error code.
1058 # commit is made, no output and no error code.
1039 raise error.Abort(_('failed to commit svn changes'))
1059 raise error.Abort(_('failed to commit svn changes'))
1040 raise error.Abort(commitinfo.splitlines()[-1])
1060 raise error.Abort(commitinfo.splitlines()[-1])
1041 newrev = newrev.groups()[0]
1061 newrev = newrev.groups()[0]
1042 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1062 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1043 return newrev
1063 return newrev
1044
1064
1045 @annotatesubrepoerror
1065 @annotatesubrepoerror
1046 def remove(self):
1066 def remove(self):
1047 if self.dirty():
1067 if self.dirty():
1048 self.ui.warn(_('not removing repo %s because '
1068 self.ui.warn(_('not removing repo %s because '
1049 'it has changes.\n') % self._path)
1069 'it has changes.\n') % self._path)
1050 return
1070 return
1051 self.ui.note(_('removing subrepo %s\n') % self._path)
1071 self.ui.note(_('removing subrepo %s\n') % self._path)
1052
1072
1053 self.wvfs.rmtree(forcibly=True)
1073 self.wvfs.rmtree(forcibly=True)
1054 try:
1074 try:
1055 pwvfs = self._ctx.repo().wvfs
1075 pwvfs = self._ctx.repo().wvfs
1056 pwvfs.removedirs(pwvfs.dirname(self._path))
1076 pwvfs.removedirs(pwvfs.dirname(self._path))
1057 except OSError:
1077 except OSError:
1058 pass
1078 pass
1059
1079
1060 @annotatesubrepoerror
1080 @annotatesubrepoerror
1061 def get(self, state, overwrite=False):
1081 def get(self, state, overwrite=False):
1062 if overwrite:
1082 if overwrite:
1063 self._svncommand(['revert', '--recursive'])
1083 self._svncommand(['revert', '--recursive'])
1064 args = ['checkout']
1084 args = ['checkout']
1065 if self._svnversion >= (1, 5):
1085 if self._svnversion >= (1, 5):
1066 args.append('--force')
1086 args.append('--force')
1067 # The revision must be specified at the end of the URL to properly
1087 # The revision must be specified at the end of the URL to properly
1068 # update to a directory which has since been deleted and recreated.
1088 # update to a directory which has since been deleted and recreated.
1069 args.append('%s@%s' % (state[0], state[1]))
1089 args.append('%s@%s' % (state[0], state[1]))
1070
1090
1071 # SEC: check that the ssh url is safe
1091 # SEC: check that the ssh url is safe
1072 util.checksafessh(state[0])
1092 util.checksafessh(state[0])
1073
1093
1074 status, err = self._svncommand(args, failok=True)
1094 status, err = self._svncommand(args, failok=True)
1075 _sanitize(self.ui, self.wvfs, '.svn')
1095 _sanitize(self.ui, self.wvfs, '.svn')
1076 if not re.search('Checked out revision [0-9]+.', status):
1096 if not re.search('Checked out revision [0-9]+.', status):
1077 if ('is already a working copy for a different URL' in err
1097 if ('is already a working copy for a different URL' in err
1078 and (self._wcchanged()[:2] == (False, False))):
1098 and (self._wcchanged()[:2] == (False, False))):
1079 # obstructed but clean working copy, so just blow it away.
1099 # obstructed but clean working copy, so just blow it away.
1080 self.remove()
1100 self.remove()
1081 self.get(state, overwrite=False)
1101 self.get(state, overwrite=False)
1082 return
1102 return
1083 raise error.Abort((status or err).splitlines()[-1])
1103 raise error.Abort((status or err).splitlines()[-1])
1084 self.ui.status(status)
1104 self.ui.status(status)
1085
1105
1086 @annotatesubrepoerror
1106 @annotatesubrepoerror
1087 def merge(self, state):
1107 def merge(self, state):
1088 old = self._state[1]
1108 old = self._state[1]
1089 new = state[1]
1109 new = state[1]
1090 wcrev = self._wcrev()
1110 wcrev = self._wcrev()
1091 if new != wcrev:
1111 if new != wcrev:
1092 dirty = old == wcrev or self._wcchanged()[0]
1112 dirty = old == wcrev or self._wcchanged()[0]
1093 if _updateprompt(self.ui, self, dirty, wcrev, new):
1113 if _updateprompt(self.ui, self, dirty, wcrev, new):
1094 self.get(state, False)
1114 self.get(state, False)
1095
1115
1096 def push(self, opts):
1116 def push(self, opts):
1097 # push is a no-op for SVN
1117 # push is a no-op for SVN
1098 return True
1118 return True
1099
1119
1100 @annotatesubrepoerror
1120 @annotatesubrepoerror
1101 def files(self):
1121 def files(self):
1102 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1122 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1103 doc = xml.dom.minidom.parseString(output)
1123 doc = xml.dom.minidom.parseString(output)
1104 paths = []
1124 paths = []
1105 for e in doc.getElementsByTagName('entry'):
1125 for e in doc.getElementsByTagName('entry'):
1106 kind = str(e.getAttribute('kind'))
1126 kind = str(e.getAttribute('kind'))
1107 if kind != 'file':
1127 if kind != 'file':
1108 continue
1128 continue
1109 name = ''.join(c.data for c
1129 name = ''.join(c.data for c
1110 in e.getElementsByTagName('name')[0].childNodes
1130 in e.getElementsByTagName('name')[0].childNodes
1111 if c.nodeType == c.TEXT_NODE)
1131 if c.nodeType == c.TEXT_NODE)
1112 paths.append(name.encode('utf-8'))
1132 paths.append(name.encode('utf-8'))
1113 return paths
1133 return paths
1114
1134
1115 def filedata(self, name, decode):
1135 def filedata(self, name, decode):
1116 return self._svncommand(['cat'], name)[0]
1136 return self._svncommand(['cat'], name)[0]
1117
1137
1118
1138
1119 class gitsubrepo(abstractsubrepo):
1139 class gitsubrepo(abstractsubrepo):
1120 def __init__(self, ctx, path, state, allowcreate):
1140 def __init__(self, ctx, path, state, allowcreate):
1121 super(gitsubrepo, self).__init__(ctx, path)
1141 super(gitsubrepo, self).__init__(ctx, path)
1122 self._state = state
1142 self._state = state
1123 self._abspath = ctx.repo().wjoin(path)
1143 self._abspath = ctx.repo().wjoin(path)
1124 self._subparent = ctx.repo()
1144 self._subparent = ctx.repo()
1125 self._ensuregit()
1145 self._ensuregit()
1126
1146
1127 def _ensuregit(self):
1147 def _ensuregit(self):
1128 try:
1148 try:
1129 self._gitexecutable = 'git'
1149 self._gitexecutable = 'git'
1130 out, err = self._gitnodir(['--version'])
1150 out, err = self._gitnodir(['--version'])
1131 except OSError as e:
1151 except OSError as e:
1132 genericerror = _("error executing git for subrepo '%s': %s")
1152 genericerror = _("error executing git for subrepo '%s': %s")
1133 notfoundhint = _("check git is installed and in your PATH")
1153 notfoundhint = _("check git is installed and in your PATH")
1134 if e.errno != errno.ENOENT:
1154 if e.errno != errno.ENOENT:
1135 raise error.Abort(genericerror % (
1155 raise error.Abort(genericerror % (
1136 self._path, encoding.strtolocal(e.strerror)))
1156 self._path, encoding.strtolocal(e.strerror)))
1137 elif pycompat.iswindows:
1157 elif pycompat.iswindows:
1138 try:
1158 try:
1139 self._gitexecutable = 'git.cmd'
1159 self._gitexecutable = 'git.cmd'
1140 out, err = self._gitnodir(['--version'])
1160 out, err = self._gitnodir(['--version'])
1141 except OSError as e2:
1161 except OSError as e2:
1142 if e2.errno == errno.ENOENT:
1162 if e2.errno == errno.ENOENT:
1143 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1163 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1144 " for subrepo '%s'") % self._path,
1164 " for subrepo '%s'") % self._path,
1145 hint=notfoundhint)
1165 hint=notfoundhint)
1146 else:
1166 else:
1147 raise error.Abort(genericerror % (self._path,
1167 raise error.Abort(genericerror % (self._path,
1148 encoding.strtolocal(e2.strerror)))
1168 encoding.strtolocal(e2.strerror)))
1149 else:
1169 else:
1150 raise error.Abort(_("couldn't find git for subrepo '%s'")
1170 raise error.Abort(_("couldn't find git for subrepo '%s'")
1151 % self._path, hint=notfoundhint)
1171 % self._path, hint=notfoundhint)
1152 versionstatus = self._checkversion(out)
1172 versionstatus = self._checkversion(out)
1153 if versionstatus == 'unknown':
1173 if versionstatus == 'unknown':
1154 self.ui.warn(_('cannot retrieve git version\n'))
1174 self.ui.warn(_('cannot retrieve git version\n'))
1155 elif versionstatus == 'abort':
1175 elif versionstatus == 'abort':
1156 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1176 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1157 elif versionstatus == 'warning':
1177 elif versionstatus == 'warning':
1158 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1178 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1159
1179
1160 @staticmethod
1180 @staticmethod
1161 def _gitversion(out):
1181 def _gitversion(out):
1162 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1182 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1163 if m:
1183 if m:
1164 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1184 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1165
1185
1166 m = re.search(br'^git version (\d+)\.(\d+)', out)
1186 m = re.search(br'^git version (\d+)\.(\d+)', out)
1167 if m:
1187 if m:
1168 return (int(m.group(1)), int(m.group(2)), 0)
1188 return (int(m.group(1)), int(m.group(2)), 0)
1169
1189
1170 return -1
1190 return -1
1171
1191
1172 @staticmethod
1192 @staticmethod
1173 def _checkversion(out):
1193 def _checkversion(out):
1174 '''ensure git version is new enough
1194 '''ensure git version is new enough
1175
1195
1176 >>> _checkversion = gitsubrepo._checkversion
1196 >>> _checkversion = gitsubrepo._checkversion
1177 >>> _checkversion(b'git version 1.6.0')
1197 >>> _checkversion(b'git version 1.6.0')
1178 'ok'
1198 'ok'
1179 >>> _checkversion(b'git version 1.8.5')
1199 >>> _checkversion(b'git version 1.8.5')
1180 'ok'
1200 'ok'
1181 >>> _checkversion(b'git version 1.4.0')
1201 >>> _checkversion(b'git version 1.4.0')
1182 'abort'
1202 'abort'
1183 >>> _checkversion(b'git version 1.5.0')
1203 >>> _checkversion(b'git version 1.5.0')
1184 'warning'
1204 'warning'
1185 >>> _checkversion(b'git version 1.9-rc0')
1205 >>> _checkversion(b'git version 1.9-rc0')
1186 'ok'
1206 'ok'
1187 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1207 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1188 'ok'
1208 'ok'
1189 >>> _checkversion(b'git version 1.9.0.GIT')
1209 >>> _checkversion(b'git version 1.9.0.GIT')
1190 'ok'
1210 'ok'
1191 >>> _checkversion(b'git version 12345')
1211 >>> _checkversion(b'git version 12345')
1192 'unknown'
1212 'unknown'
1193 >>> _checkversion(b'no')
1213 >>> _checkversion(b'no')
1194 'unknown'
1214 'unknown'
1195 '''
1215 '''
1196 version = gitsubrepo._gitversion(out)
1216 version = gitsubrepo._gitversion(out)
1197 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1217 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1198 # despite the docstring comment. For now, error on 1.4.0, warn on
1218 # despite the docstring comment. For now, error on 1.4.0, warn on
1199 # 1.5.0 but attempt to continue.
1219 # 1.5.0 but attempt to continue.
1200 if version == -1:
1220 if version == -1:
1201 return 'unknown'
1221 return 'unknown'
1202 if version < (1, 5, 0):
1222 if version < (1, 5, 0):
1203 return 'abort'
1223 return 'abort'
1204 elif version < (1, 6, 0):
1224 elif version < (1, 6, 0):
1205 return 'warning'
1225 return 'warning'
1206 return 'ok'
1226 return 'ok'
1207
1227
1208 def _gitcommand(self, commands, env=None, stream=False):
1228 def _gitcommand(self, commands, env=None, stream=False):
1209 return self._gitdir(commands, env=env, stream=stream)[0]
1229 return self._gitdir(commands, env=env, stream=stream)[0]
1210
1230
1211 def _gitdir(self, commands, env=None, stream=False):
1231 def _gitdir(self, commands, env=None, stream=False):
1212 return self._gitnodir(commands, env=env, stream=stream,
1232 return self._gitnodir(commands, env=env, stream=stream,
1213 cwd=self._abspath)
1233 cwd=self._abspath)
1214
1234
1215 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1235 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1216 """Calls the git command
1236 """Calls the git command
1217
1237
1218         The method tries to call the git command. Versions prior to 1.6.0
1238         The method tries to call the git command. Versions prior to 1.6.0
1219         are not supported and will very probably fail.
1239         are not supported and will very probably fail.
1220 """
1240 """
1221 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1241 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1222 if env is None:
1242 if env is None:
1223 env = encoding.environ.copy()
1243 env = encoding.environ.copy()
1224 # disable localization for Git output (issue5176)
1244 # disable localization for Git output (issue5176)
1225 env['LC_ALL'] = 'C'
1245 env['LC_ALL'] = 'C'
1226 # fix for Git CVE-2015-7545
1246 # fix for Git CVE-2015-7545
1227 if 'GIT_ALLOW_PROTOCOL' not in env:
1247 if 'GIT_ALLOW_PROTOCOL' not in env:
1228 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1248 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1229 # unless ui.quiet is set, print git's stderr,
1249 # unless ui.quiet is set, print git's stderr,
1230 # which is mostly progress and useful info
1250 # which is mostly progress and useful info
1231 errpipe = None
1251 errpipe = None
1232 if self.ui.quiet:
1252 if self.ui.quiet:
1233 errpipe = open(os.devnull, 'w')
1253 errpipe = open(os.devnull, 'w')
1234 if self.ui._colormode and len(commands) and commands[0] == "diff":
1254 if self.ui._colormode and len(commands) and commands[0] == "diff":
1235 # insert the argument in the front,
1255 # insert the argument in the front,
1236 # the end of git diff arguments is used for paths
1256 # the end of git diff arguments is used for paths
1237 commands.insert(1, '--color')
1257 commands.insert(1, '--color')
1238 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1258 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1239 cwd=cwd, env=env, close_fds=util.closefds,
1259 cwd=cwd, env=env, close_fds=util.closefds,
1240 stdout=subprocess.PIPE, stderr=errpipe)
1260 stdout=subprocess.PIPE, stderr=errpipe)
1241 if stream:
1261 if stream:
1242 return p.stdout, None
1262 return p.stdout, None
1243
1263
1244 retdata = p.stdout.read().strip()
1264 retdata = p.stdout.read().strip()
1245 # wait for the child to exit to avoid race condition.
1265 # wait for the child to exit to avoid race condition.
1246 p.wait()
1266 p.wait()
1247
1267
1248 if p.returncode != 0 and p.returncode != 1:
1268 if p.returncode != 0 and p.returncode != 1:
1249 # there are certain error codes that are ok
1269 # there are certain error codes that are ok
1250 command = commands[0]
1270 command = commands[0]
1251 if command in ('cat-file', 'symbolic-ref'):
1271 if command in ('cat-file', 'symbolic-ref'):
1252 return retdata, p.returncode
1272 return retdata, p.returncode
1253 # for all others, abort
1273 # for all others, abort
1254 raise error.Abort(_('git %s error %d in %s') %
1274 raise error.Abort(_('git %s error %d in %s') %
1255 (command, p.returncode, self._relpath))
1275 (command, p.returncode, self._relpath))
1256
1276
1257 return retdata, p.returncode
1277 return retdata, p.returncode
1258
1278
1259 def _gitmissing(self):
1279 def _gitmissing(self):
1260 return not self.wvfs.exists('.git')
1280 return not self.wvfs.exists('.git')
1261
1281
1262 def _gitstate(self):
1282 def _gitstate(self):
1263 return self._gitcommand(['rev-parse', 'HEAD'])
1283 return self._gitcommand(['rev-parse', 'HEAD'])
1264
1284
1265 def _gitcurrentbranch(self):
1285 def _gitcurrentbranch(self):
1266 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1286 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1267 if err:
1287 if err:
1268 current = None
1288 current = None
1269 return current
1289 return current
1270
1290
1271 def _gitremote(self, remote):
1291 def _gitremote(self, remote):
1272 out = self._gitcommand(['remote', 'show', '-n', remote])
1292 out = self._gitcommand(['remote', 'show', '-n', remote])
1273 line = out.split('\n')[1]
1293 line = out.split('\n')[1]
1274 i = line.index('URL: ') + len('URL: ')
1294 i = line.index('URL: ') + len('URL: ')
1275 return line[i:]
1295 return line[i:]
1276
1296
1277 def _githavelocally(self, revision):
1297 def _githavelocally(self, revision):
1278 out, code = self._gitdir(['cat-file', '-e', revision])
1298 out, code = self._gitdir(['cat-file', '-e', revision])
1279 return code == 0
1299 return code == 0
1280
1300
1281 def _gitisancestor(self, r1, r2):
1301 def _gitisancestor(self, r1, r2):
1282 base = self._gitcommand(['merge-base', r1, r2])
1302 base = self._gitcommand(['merge-base', r1, r2])
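         # r1 is an ancestor of r2 exactly when their merge-base is r1 itself
         # r1 is an ancestor of r2 exactly when their merge-base is r1 itself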
1283 return base == r1
1303 return base == r1
1284
1304
1285 def _gitisbare(self):
1305 def _gitisbare(self):
1286 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1306 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1287
1307
1288 def _gitupdatestat(self):
1308 def _gitupdatestat(self):
1289 """This must be run before git diff-index.
1309 """This must be run before git diff-index.
1290 diff-index only looks at changes to file stat;
1310 diff-index only looks at changes to file stat;
1291 this command looks at file contents and updates the stat."""
1311 this command looks at file contents and updates the stat."""
1292 self._gitcommand(['update-index', '-q', '--refresh'])
1312 self._gitcommand(['update-index', '-q', '--refresh'])
1293
1313
1294 def _gitbranchmap(self):
1314 def _gitbranchmap(self):
1295 '''returns 2 things:
1315 '''returns 2 things:
1296 a map from git branch to revision
1316 a map from git branch to revision
1297 a map from revision to branches'''
1317 a map from revision to branches'''
1298 branch2rev = {}
1318 branch2rev = {}
1299 rev2branch = {}
1319 rev2branch = {}
1300
1320
1301 out = self._gitcommand(['for-each-ref', '--format',
1321 out = self._gitcommand(['for-each-ref', '--format',
1302 '%(objectname) %(refname)'])
1322 '%(objectname) %(refname)'])
1303 for line in out.split('\n'):
1323 for line in out.split('\n'):
1304 revision, ref = line.split(' ')
1324 revision, ref = line.split(' ')
1305 if (not ref.startswith('refs/heads/') and
1325 if (not ref.startswith('refs/heads/') and
1306 not ref.startswith('refs/remotes/')):
1326 not ref.startswith('refs/remotes/')):
1307 continue
1327 continue
1308 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1328 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1309 continue # ignore remote/HEAD redirects
1329 continue # ignore remote/HEAD redirects
1310 branch2rev[ref] = revision
1330 branch2rev[ref] = revision
1311 rev2branch.setdefault(revision, []).append(ref)
1331 rev2branch.setdefault(revision, []).append(ref)
1312 return branch2rev, rev2branch
1332 return branch2rev, rev2branch
1313
1333
1314 def _gittracking(self, branches):
1334 def _gittracking(self, branches):
1315 'return map of remote branch to local tracking branch'
1335 'return map of remote branch to local tracking branch'
1316 # assumes no more than one local tracking branch for each remote
1336 # assumes no more than one local tracking branch for each remote
1317 tracking = {}
1337 tracking = {}
1318 for b in branches:
1338 for b in branches:
1319 if b.startswith('refs/remotes/'):
1339 if b.startswith('refs/remotes/'):
1320 continue
1340 continue
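             # b is a local head such as 'refs/heads/foo'; keep only the bare name
             # b is a local head such as 'refs/heads/foo'; keep only the bare name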
1321 bname = b.split('/', 2)[2]
1341 bname = b.split('/', 2)[2]
1322 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1342 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1323 if remote:
1343 if remote:
1324 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1344 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1325 tracking['refs/remotes/%s/%s' %
1345 tracking['refs/remotes/%s/%s' %
1326 (remote, ref.split('/', 2)[2])] = b
1346 (remote, ref.split('/', 2)[2])] = b
1327 return tracking
1347 return tracking
1328
1348
1329 def _abssource(self, source):
1349 def _abssource(self, source):
1330 if '://' not in source:
1350 if '://' not in source:
1331 # recognize the scp syntax as an absolute source
1351 # recognize the scp syntax as an absolute source
1332 colon = source.find(':')
1352 colon = source.find(':')
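             # scp-style sources like 'user@host:path' have a colon before any slash
             # scp-style sources like 'user@host:path' have a colon before any slash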
1333 if colon != -1 and '/' not in source[:colon]:
1353 if colon != -1 and '/' not in source[:colon]:
1334 return source
1354 return source
1335 self._subsource = source
1355 self._subsource = source
1336 return _abssource(self)
1356 return _abssource(self)
1337
1357
1338 def _fetch(self, source, revision):
1358 def _fetch(self, source, revision):
1339 if self._gitmissing():
1359 if self._gitmissing():
1340 # SEC: check for safe ssh url
1360 # SEC: check for safe ssh url
1341 util.checksafessh(source)
1361 util.checksafessh(source)
1342
1362
1343 source = self._abssource(source)
1363 source = self._abssource(source)
1344 self.ui.status(_('cloning subrepo %s from %s\n') %
1364 self.ui.status(_('cloning subrepo %s from %s\n') %
1345 (self._relpath, source))
1365 (self._relpath, source))
1346 self._gitnodir(['clone', source, self._abspath])
1366 self._gitnodir(['clone', source, self._abspath])
1347 if self._githavelocally(revision):
1367 if self._githavelocally(revision):
1348 return
1368 return
1349 self.ui.status(_('pulling subrepo %s from %s\n') %
1369 self.ui.status(_('pulling subrepo %s from %s\n') %
1350 (self._relpath, self._gitremote('origin')))
1370 (self._relpath, self._gitremote('origin')))
1351 # try only origin: the originally cloned repo
1371 # try only origin: the originally cloned repo
1352 self._gitcommand(['fetch'])
1372 self._gitcommand(['fetch'])
1353 if not self._githavelocally(revision):
1373 if not self._githavelocally(revision):
1354 raise error.Abort(_('revision %s does not exist in subrepository '
1374 raise error.Abort(_('revision %s does not exist in subrepository '
1355 '"%s"\n') % (revision, self._relpath))
1375 '"%s"\n') % (revision, self._relpath))
1356
1376
1357 @annotatesubrepoerror
1377 @annotatesubrepoerror
1358 def dirty(self, ignoreupdate=False, missing=False):
1378 def dirty(self, ignoreupdate=False, missing=False):
1359 if self._gitmissing():
1379 if self._gitmissing():
1360 return self._state[1] != ''
1380 return self._state[1] != ''
1361 if self._gitisbare():
1381 if self._gitisbare():
1362 return True
1382 return True
1363 if not ignoreupdate and self._state[1] != self._gitstate():
1383 if not ignoreupdate and self._state[1] != self._gitstate():
1364 # different version checked out
1384 # different version checked out
1365 return True
1385 return True
1366 # check for staged changes or modified files; ignore untracked files
1386 # check for staged changes or modified files; ignore untracked files
1367 self._gitupdatestat()
1387 self._gitupdatestat()
1368 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1388 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
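         # diff-index --quiet exits with status 1 when differences were found
         # diff-index --quiet exits with status 1 when differences were found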
1369 return code == 1
1389 return code == 1
1370
1390
1371 def basestate(self):
1391 def basestate(self):
1372 return self._gitstate()
1392 return self._gitstate()
1373
1393
1374 @annotatesubrepoerror
1394 @annotatesubrepoerror
1375 def get(self, state, overwrite=False):
1395 def get(self, state, overwrite=False):
1376 source, revision, kind = state
1396 source, revision, kind = state
1377 if not revision:
1397 if not revision:
1378 self.remove()
1398 self.remove()
1379 return
1399 return
1380 self._fetch(source, revision)
1400 self._fetch(source, revision)
1381 # if the repo was set to be bare, unbare it
1401 # if the repo was set to be bare, unbare it
1382 if self._gitisbare():
1402 if self._gitisbare():
1383 self._gitcommand(['config', 'core.bare', 'false'])
1403 self._gitcommand(['config', 'core.bare', 'false'])
1384 if self._gitstate() == revision:
1404 if self._gitstate() == revision:
1385 self._gitcommand(['reset', '--hard', 'HEAD'])
1405 self._gitcommand(['reset', '--hard', 'HEAD'])
1386 return
1406 return
1387 elif self._gitstate() == revision:
1407 elif self._gitstate() == revision:
1388 if overwrite:
1408 if overwrite:
1389 # first reset the index to unmark new files for commit, because
1409 # first reset the index to unmark new files for commit, because
1390 # reset --hard will otherwise throw away files added for commit,
1410 # reset --hard will otherwise throw away files added for commit,
1391 # not just unmark them.
1411 # not just unmark them.
1392 self._gitcommand(['reset', 'HEAD'])
1412 self._gitcommand(['reset', 'HEAD'])
1393 self._gitcommand(['reset', '--hard', 'HEAD'])
1413 self._gitcommand(['reset', '--hard', 'HEAD'])
1394 return
1414 return
1395 branch2rev, rev2branch = self._gitbranchmap()
1415 branch2rev, rev2branch = self._gitbranchmap()
1396
1416
1397 def checkout(args):
1417 def checkout(args):
1398 cmd = ['checkout']
1418 cmd = ['checkout']
1399 if overwrite:
1419 if overwrite:
1400 # first reset the index to unmark new files for commit, because
1420 # first reset the index to unmark new files for commit, because
1401 # the -f option will otherwise throw away files added for
1421 # the -f option will otherwise throw away files added for
1402 # commit, not just unmark them.
1422 # commit, not just unmark them.
1403 self._gitcommand(['reset', 'HEAD'])
1423 self._gitcommand(['reset', 'HEAD'])
1404 cmd.append('-f')
1424 cmd.append('-f')
1405 self._gitcommand(cmd + args)
1425 self._gitcommand(cmd + args)
1406 _sanitize(self.ui, self.wvfs, '.git')
1426 _sanitize(self.ui, self.wvfs, '.git')
1407
1427
1408 def rawcheckout():
1428 def rawcheckout():
1409 # no branch to checkout, check it out with no branch
1429 # no branch to checkout, check it out with no branch
1410 self.ui.warn(_('checking out detached HEAD in '
1430 self.ui.warn(_('checking out detached HEAD in '
1411 'subrepository "%s"\n') % self._relpath)
1431 'subrepository "%s"\n') % self._relpath)
1412 self.ui.warn(_('check out a git branch if you intend '
1432 self.ui.warn(_('check out a git branch if you intend '
1413 'to make changes\n'))
1433 'to make changes\n'))
1414 checkout(['-q', revision])
1434 checkout(['-q', revision])
1415
1435
1416 if revision not in rev2branch:
1436 if revision not in rev2branch:
1417 rawcheckout()
1437 rawcheckout()
1418 return
1438 return
1419 branches = rev2branch[revision]
1439 branches = rev2branch[revision]
1420 firstlocalbranch = None
1440 firstlocalbranch = None
1421 for b in branches:
1441 for b in branches:
1422 if b == 'refs/heads/master':
1442 if b == 'refs/heads/master':
1423 # master trumps all other branches
1443 # master trumps all other branches
1424 checkout(['refs/heads/master'])
1444 checkout(['refs/heads/master'])
1425 return
1445 return
1426 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1446 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1427 firstlocalbranch = b
1447 firstlocalbranch = b
1428 if firstlocalbranch:
1448 if firstlocalbranch:
1429 checkout([firstlocalbranch])
1449 checkout([firstlocalbranch])
1430 return
1450 return
1431
1451
1432 tracking = self._gittracking(branch2rev.keys())
1452 tracking = self._gittracking(branch2rev.keys())
1433 # choose a remote branch already tracked if possible
1453 # choose a remote branch already tracked if possible
1434 remote = branches[0]
1454 remote = branches[0]
1435 if remote not in tracking:
1455 if remote not in tracking:
1436 for b in branches:
1456 for b in branches:
1437 if b in tracking:
1457 if b in tracking:
1438 remote = b
1458 remote = b
1439 break
1459 break
1440
1460
1441 if remote not in tracking:
1461 if remote not in tracking:
1442 # create a new local tracking branch
1462 # create a new local tracking branch
1443 local = remote.split('/', 3)[3]
1463 local = remote.split('/', 3)[3]
1444 checkout(['-b', local, remote])
1464 checkout(['-b', local, remote])
1445 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1465 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1446 # When updating to a tracked remote branch,
1466 # When updating to a tracked remote branch,
1447 # if the local tracking branch is downstream of it,
1467 # if the local tracking branch is downstream of it,
1448 # a normal `git pull` would have performed a "fast-forward merge"
1468 # a normal `git pull` would have performed a "fast-forward merge"
1449 # which is equivalent to updating the local branch to the remote.
1469 # which is equivalent to updating the local branch to the remote.
1450 # Since we are only looking at branching at update, we need to
1470 # Since we are only looking at branching at update, we need to
1451 # detect this situation and perform this action lazily.
1471 # detect this situation and perform this action lazily.
1452 if tracking[remote] != self._gitcurrentbranch():
1472 if tracking[remote] != self._gitcurrentbranch():
1453 checkout([tracking[remote]])
1473 checkout([tracking[remote]])
1454 self._gitcommand(['merge', '--ff', remote])
1474 self._gitcommand(['merge', '--ff', remote])
1455 _sanitize(self.ui, self.wvfs, '.git')
1475 _sanitize(self.ui, self.wvfs, '.git')
1456 else:
1476 else:
1457 # a real merge would be required, just checkout the revision
1477 # a real merge would be required, just checkout the revision
1458 rawcheckout()
1478 rawcheckout()
1459
1479
1460 @annotatesubrepoerror
1480 @annotatesubrepoerror
1461 def commit(self, text, user, date):
1481 def commit(self, text, user, date):
1462 if self._gitmissing():
1482 if self._gitmissing():
1463 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1483 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1464 cmd = ['commit', '-a', '-m', text]
1484 cmd = ['commit', '-a', '-m', text]
1465 env = encoding.environ.copy()
1485 env = encoding.environ.copy()
1466 if user:
1486 if user:
1467 cmd += ['--author', user]
1487 cmd += ['--author', user]
1468 if date:
1488 if date:
1469             # git's date parser silently ignores dates with seconds < 1e9,
1489             # git's date parser silently ignores dates with seconds < 1e9,
1470             # so convert to ISO8601
1490             # so convert to ISO8601
1471 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1491 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1472 '%Y-%m-%dT%H:%M:%S %1%2')
1492 '%Y-%m-%dT%H:%M:%S %1%2')
1473 self._gitcommand(cmd, env=env)
1493 self._gitcommand(cmd, env=env)
1474         # make sure the commit worked; otherwise HEAD might not exist under
1494         # make sure the commit worked; otherwise HEAD might not exist under
1475         # certain circumstances
1495         # certain circumstances
1476 return self._gitstate()
1496 return self._gitstate()
1477
1497
1478 @annotatesubrepoerror
1498 @annotatesubrepoerror
1479 def merge(self, state):
1499 def merge(self, state):
1480 source, revision, kind = state
1500 source, revision, kind = state
1481 self._fetch(source, revision)
1501 self._fetch(source, revision)
1482 base = self._gitcommand(['merge-base', revision, self._state[1]])
1502 base = self._gitcommand(['merge-base', revision, self._state[1]])
1483 self._gitupdatestat()
1503 self._gitupdatestat()
1484 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1504 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1485
1505
1486 def mergefunc():
1506 def mergefunc():
1487 if base == revision:
1507 if base == revision:
1488 self.get(state) # fast forward merge
1508 self.get(state) # fast forward merge
1489 elif base != self._state[1]:
1509 elif base != self._state[1]:
1490 self._gitcommand(['merge', '--no-commit', revision])
1510 self._gitcommand(['merge', '--no-commit', revision])
1491 _sanitize(self.ui, self.wvfs, '.git')
1511 _sanitize(self.ui, self.wvfs, '.git')
1492
1512
1493 if self.dirty():
1513 if self.dirty():
1494 if self._gitstate() != revision:
1514 if self._gitstate() != revision:
1495 dirty = self._gitstate() == self._state[1] or code != 0
1515 dirty = self._gitstate() == self._state[1] or code != 0
1496 if _updateprompt(self.ui, self, dirty,
1516 if _updateprompt(self.ui, self, dirty,
1497 self._state[1][:7], revision[:7]):
1517 self._state[1][:7], revision[:7]):
1498 mergefunc()
1518 mergefunc()
1499 else:
1519 else:
1500 mergefunc()
1520 mergefunc()
1501
1521
1502 @annotatesubrepoerror
1522 @annotatesubrepoerror
1503 def push(self, opts):
1523 def push(self, opts):
1504 force = opts.get('force')
1524 force = opts.get('force')
1505
1525
1506 if not self._state[1]:
1526 if not self._state[1]:
1507 return True
1527 return True
1508 if self._gitmissing():
1528 if self._gitmissing():
1509 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1529 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1510 # if a branch in origin contains the revision, nothing to do
1530 # if a branch in origin contains the revision, nothing to do
1511 branch2rev, rev2branch = self._gitbranchmap()
1531 branch2rev, rev2branch = self._gitbranchmap()
1512 if self._state[1] in rev2branch:
1532 if self._state[1] in rev2branch:
1513 for b in rev2branch[self._state[1]]:
1533 for b in rev2branch[self._state[1]]:
1514 if b.startswith('refs/remotes/origin/'):
1534 if b.startswith('refs/remotes/origin/'):
1515 return True
1535 return True
1516 for b, revision in branch2rev.iteritems():
1536 for b, revision in branch2rev.iteritems():
1517 if b.startswith('refs/remotes/origin/'):
1537 if b.startswith('refs/remotes/origin/'):
1518 if self._gitisancestor(self._state[1], revision):
1538 if self._gitisancestor(self._state[1], revision):
1519 return True
1539 return True
1520 # otherwise, try to push the currently checked out branch
1540 # otherwise, try to push the currently checked out branch
1521 cmd = ['push']
1541 cmd = ['push']
1522 if force:
1542 if force:
1523 cmd.append('--force')
1543 cmd.append('--force')
1524
1544
1525 current = self._gitcurrentbranch()
1545 current = self._gitcurrentbranch()
1526 if current:
1546 if current:
1527 # determine if the current branch is even useful
1547 # determine if the current branch is even useful
1528 if not self._gitisancestor(self._state[1], current):
1548 if not self._gitisancestor(self._state[1], current):
1529 self.ui.warn(_('unrelated git branch checked out '
1549 self.ui.warn(_('unrelated git branch checked out '
1530 'in subrepository "%s"\n') % self._relpath)
1550 'in subrepository "%s"\n') % self._relpath)
1531 return False
1551 return False
1532 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1552 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1533 (current.split('/', 2)[2], self._relpath))
1553 (current.split('/', 2)[2], self._relpath))
1534 ret = self._gitdir(cmd + ['origin', current])
1554 ret = self._gitdir(cmd + ['origin', current])
1535 return ret[1] == 0
1555 return ret[1] == 0
1536 else:
1556 else:
1537 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1557 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1538 'cannot push revision %s\n') %
1558 'cannot push revision %s\n') %
1539 (self._relpath, self._state[1]))
1559 (self._relpath, self._state[1]))
1540 return False
1560 return False
1541
1561
1542 @annotatesubrepoerror
1562 @annotatesubrepoerror
1543 def add(self, ui, match, prefix, explicitonly, **opts):
1563 def add(self, ui, match, prefix, explicitonly, **opts):
1544 if self._gitmissing():
1564 if self._gitmissing():
1545 return []
1565 return []
1546
1566
1547 (modified, added, removed,
1567 (modified, added, removed,
1548 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1568 deleted, unknown, ignored, clean) = self.status(None, unknown=True,
1549 clean=True)
1569 clean=True)
1550
1570
1551 tracked = set()
1571 tracked = set()
1552         # dirstate codes 'a', 'm' and 'n' warn; 'r' is added again
1572         # dirstate codes 'a', 'm' and 'n' warn; 'r' is added again
1553 for l in (modified, added, deleted, clean):
1573 for l in (modified, added, deleted, clean):
1554 tracked.update(l)
1574 tracked.update(l)
1555
1575
1556 # Unknown files not of interest will be rejected by the matcher
1576 # Unknown files not of interest will be rejected by the matcher
1557 files = unknown
1577 files = unknown
1558 files.extend(match.files())
1578 files.extend(match.files())
1559
1579
1560 rejected = []
1580 rejected = []
1561
1581
1562 files = [f for f in sorted(set(files)) if match(f)]
1582 files = [f for f in sorted(set(files)) if match(f)]
1563 for f in files:
1583 for f in files:
1564 exact = match.exact(f)
1584 exact = match.exact(f)
1565 command = ["add"]
1585 command = ["add"]
1566 if exact:
1586 if exact:
1567                 command.append("-f") # should be added, even if ignored
1587                 command.append("-f") # should be added, even if ignored
1568 if ui.verbose or not exact:
1588 if ui.verbose or not exact:
1569 ui.status(_('adding %s\n') % match.rel(f))
1589 ui.status(_('adding %s\n') % match.rel(f))
1570
1590
1571 if f in tracked: # hg prints 'adding' even if already tracked
1591 if f in tracked: # hg prints 'adding' even if already tracked
1572 if exact:
1592 if exact:
1573 rejected.append(f)
1593 rejected.append(f)
1574 continue
1594 continue
1575 if not opts.get(r'dry_run'):
1595 if not opts.get(r'dry_run'):
1576 self._gitcommand(command + [f])
1596 self._gitcommand(command + [f])
1577
1597
1578 for f in rejected:
1598 for f in rejected:
1579 ui.warn(_("%s already tracked!\n") % match.abs(f))
1599 ui.warn(_("%s already tracked!\n") % match.abs(f))
1580
1600
1581 return rejected
1601 return rejected
1582
1602
1583 @annotatesubrepoerror
1603 @annotatesubrepoerror
1584 def remove(self):
1604 def remove(self):
1585 if self._gitmissing():
1605 if self._gitmissing():
1586 return
1606 return
1587 if self.dirty():
1607 if self.dirty():
1588 self.ui.warn(_('not removing repo %s because '
1608 self.ui.warn(_('not removing repo %s because '
1589 'it has changes.\n') % self._relpath)
1609 'it has changes.\n') % self._relpath)
1590 return
1610 return
1591 # we can't fully delete the repository as it may contain
1611 # we can't fully delete the repository as it may contain
1592 # local-only history
1612 # local-only history
1593 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1613 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1594 self._gitcommand(['config', 'core.bare', 'true'])
1614 self._gitcommand(['config', 'core.bare', 'true'])
1595 for f, kind in self.wvfs.readdir():
1615 for f, kind in self.wvfs.readdir():
1596 if f == '.git':
1616 if f == '.git':
1597 continue
1617 continue
1598 if kind == stat.S_IFDIR:
1618 if kind == stat.S_IFDIR:
1599 self.wvfs.rmtree(f)
1619 self.wvfs.rmtree(f)
1600 else:
1620 else:
1601 self.wvfs.unlink(f)
1621 self.wvfs.unlink(f)
1602
1622
1603 def archive(self, archiver, prefix, match=None, decode=True):
1623 def archive(self, archiver, prefix, match=None, decode=True):
1604 total = 0
1624 total = 0
1605 source, revision = self._state
1625 source, revision = self._state
1606 if not revision:
1626 if not revision:
1607 return total
1627 return total
1608 self._fetch(source, revision)
1628 self._fetch(source, revision)
1609
1629
1610         # Use git's native archive command and parse its tar output.
1630         # Use git's native archive command and parse its tar output.
1611 # This should be much faster than manually traversing the trees
1631 # This should be much faster than manually traversing the trees
1612 # and objects with many subprocess calls.
1632 # and objects with many subprocess calls.
1613 tarstream = self._gitcommand(['archive', revision], stream=True)
1633 tarstream = self._gitcommand(['archive', revision], stream=True)
1614 tar = tarfile.open(fileobj=tarstream, mode='r|')
1634 tar = tarfile.open(fileobj=tarstream, mode='r|')
1615 relpath = subrelpath(self)
1635 relpath = subrelpath(self)
1616 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1636 self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
1617 for i, info in enumerate(tar):
1637 for i, info in enumerate(tar):
1618 if info.isdir():
1638 if info.isdir():
1619 continue
1639 continue
1620 if match and not match(info.name):
1640 if match and not match(info.name):
1621 continue
1641 continue
1622 if info.issym():
1642 if info.issym():
1623 data = info.linkname
1643 data = info.linkname
1624 else:
1644 else:
1625 data = tar.extractfile(info).read()
1645 data = tar.extractfile(info).read()
1626 archiver.addfile(prefix + self._path + '/' + info.name,
1646 archiver.addfile(prefix + self._path + '/' + info.name,
1627 info.mode, info.issym(), data)
1647 info.mode, info.issym(), data)
1628 total += 1
1648 total += 1
1629 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1649 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
1630 unit=_('files'))
1650 unit=_('files'))
1631 self.ui.progress(_('archiving (%s)') % relpath, None)
1651 self.ui.progress(_('archiving (%s)') % relpath, None)
1632 return total
1652 return total
1633
1653
1634
1654
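# Illustrative aside (standalone sketch, not part of gitsubrepo): archive()
# above streams `git archive` output straight into tarfile using the
# non-seekable pipe mode 'r|' instead of unpacking to disk.  The same
# technique in isolation; repository path and revision are whatever the
# caller chooses to pass in.

def _sketch_list_git_archive(repo_path, revision):
    """Yield (name, size) for regular files in `git archive <revision>`."""
    import subprocess
    import tarfile
    proc = subprocess.Popen(['git', 'archive', revision],
                            cwd=repo_path, stdout=subprocess.PIPE)
    # mode='r|' reads the tar stream sequentially from the pipe; no seeking.
    with tarfile.open(fileobj=proc.stdout, mode='r|') as tar:
        for info in tar:
            if info.isreg():
                yield info.name, info.size
    proc.wait()
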
    @annotatesubrepoerror
    def cat(self, match, fm, fntemplate, prefix, **opts):
        rev = self._state[1]
        if match.anypats():
            return 1 #No support for include/exclude yet

        if not match.files():
            return 1

        # TODO: add support for non-plain formatter (see cmdutil.cat())
        for f in match.files():
            output = self._gitcommand(["show", "%s:%s" % (rev, f)])
            fp = cmdutil.makefileobj(self._ctx, fntemplate,
                                     pathname=self.wvfs.reljoin(prefix, f))
            fp.write(output)
            fp.close()
        return 0


    @annotatesubrepoerror
    def status(self, rev2, **opts):
        rev1 = self._state[1]
        if self._gitmissing() or not rev1:
            # if the repo is missing, return no results
            return scmutil.status([], [], [], [], [], [], [])
        modified, added, removed = [], [], []
        self._gitupdatestat()
        if rev2:
            command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
        else:
            command = ['diff-index', '--no-renames', rev1]
        out = self._gitcommand(command)
        for line in out.split('\n'):
            tab = line.find('\t')
            if tab == -1:
                continue
            status, f = line[tab - 1], line[tab + 1:]
            if status == 'M':
                modified.append(f)
            elif status == 'A':
                added.append(f)
            elif status == 'D':
                removed.append(f)

        deleted, unknown, ignored, clean = [], [], [], []

        command = ['status', '--porcelain', '-z']
        if opts.get(r'unknown'):
            command += ['--untracked-files=all']
        if opts.get(r'ignored'):
            command += ['--ignored']
        out = self._gitcommand(command)

        changedfiles = set()
        changedfiles.update(modified)
        changedfiles.update(added)
        changedfiles.update(removed)
        for line in out.split('\0'):
            if not line:
                continue
            st = line[0:2]
            #moves and copies show 2 files on one line
            if line.find('\0') >= 0:
                filename1, filename2 = line[3:].split('\0')
            else:
                filename1 = line[3:]
                filename2 = None

            changedfiles.add(filename1)
            if filename2:
                changedfiles.add(filename2)

            if st == '??':
                unknown.append(filename1)
            elif st == '!!':
                ignored.append(filename1)

        if opts.get(r'clean'):
            out = self._gitcommand(['ls-files'])
            for f in out.split('\n'):
                if not f in changedfiles:
                    clean.append(f)

        return scmutil.status(modified, added, removed, deleted,
                              unknown, ignored, clean)

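# Illustrative aside (standalone sketch, not part of gitsubrepo): the
# `git status --porcelain -z` stream handled by status() above is a sequence
# of NUL-terminated "XY path" records, and rename/copy records append the
# old path as an extra NUL-separated field.  A minimal parser over invented
# sample output might look like this:

def _sketch_parse_porcelain_z(out):
    """Return (status, path, oldpath_or_None) tuples from porcelain -z text."""
    entries = []
    fields = out.split('\0')
    i = 0
    while i < len(fields):
        field = fields[i]
        if not field:
            i += 1
            continue
        st, path = field[0:2], field[3:]
        oldpath = None
        if st[0] in 'RC':  # rename/copy: the old path follows as its own field
            oldpath = fields[i + 1]
            i += 1
        entries.append((st, path, oldpath))
        i += 1
    return entries

# _sketch_parse_porcelain_z('M  a.txt\0?? new.txt\0R  new.txt\0old.txt\0')
# -> [('M ', 'a.txt', None), ('??', 'new.txt', None), ('R ', 'new.txt', 'old.txt')]
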
    @annotatesubrepoerror
    def diff(self, ui, diffopts, node2, match, prefix, **opts):
        node1 = self._state[1]
        cmd = ['diff', '--no-renames']
        if opts[r'stat']:
            cmd.append('--stat')
        else:
            # for Git, this also implies '-p'
            cmd.append('-U%d' % diffopts.context)

        gitprefix = self.wvfs.reljoin(prefix, self._path)

        if diffopts.noprefix:
            cmd.extend(['--src-prefix=%s/' % gitprefix,
                        '--dst-prefix=%s/' % gitprefix])
        else:
            cmd.extend(['--src-prefix=a/%s/' % gitprefix,
                        '--dst-prefix=b/%s/' % gitprefix])

        if diffopts.ignorews:
            cmd.append('--ignore-all-space')
        if diffopts.ignorewsamount:
            cmd.append('--ignore-space-change')
        if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
           and diffopts.ignoreblanklines:
            cmd.append('--ignore-blank-lines')

        cmd.append(node1)
        if node2:
            cmd.append(node2)

        output = ""
        if match.always():
            output += self._gitcommand(cmd) + '\n'
        else:
            st = self.status(node2)[:3]
            files = [f for sublist in st for f in sublist]
            for f in files:
                if match(f):
                    output += self._gitcommand(cmd + ['--', f]) + '\n'

        if output.strip():
            ui.write(output)

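# Illustrative aside (standalone sketch, not part of gitsubrepo): to make the
# argument assembly above concrete, this reproduces only the context/prefix
# handling of diff() and shows the `git diff` argv it yields for one invented
# combination of options (subrepo mounted at "foo", 5 context lines, default
# a/ b/ prefixes).  Revisions and the whitespace flags are omitted here.

def _sketch_build_diff_cmd(gitprefix, context, noprefix=False, stat=False):
    cmd = ['diff', '--no-renames']
    if stat:
        cmd.append('--stat')
    else:
        cmd.append('-U%d' % context)  # for Git, -U also implies -p
    if noprefix:
        cmd.extend(['--src-prefix=%s/' % gitprefix,
                    '--dst-prefix=%s/' % gitprefix])
    else:
        cmd.extend(['--src-prefix=a/%s/' % gitprefix,
                    '--dst-prefix=b/%s/' % gitprefix])
    return cmd

# _sketch_build_diff_cmd('foo', 5)
# -> ['diff', '--no-renames', '-U5', '--src-prefix=a/foo/', '--dst-prefix=b/foo/']
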
    @annotatesubrepoerror
    def revert(self, substate, *pats, **opts):
        self.ui.status(_('reverting subrepo %s\n') % substate[0])
        if not opts.get(r'no_backup'):
            status = self.status(None)
            names = status.modified
            for name in names:
                bakname = scmutil.origpath(self.ui, self._subparent, name)
                self.ui.note(_('saving current version of %s as %s\n') %
                             (name, bakname))
                self.wvfs.rename(name, bakname)

        if not opts.get(r'dry_run'):
            self.get(substate, overwrite=True)
        return []

    def shortid(self, revid):
        return revid[:7]

types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
}
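
# Illustrative aside (standalone sketch, not Mercurial's own parsing code):
# the `types` table above maps a subrepository kind to its handler class.
# In .hgsub the kind can be spelled as a bracketed prefix on the source,
# e.g. "bar = [git]../bar-src"; entries without a prefix default to 'hg'.
# The handler values below are placeholders for illustration only.

import re as _re

_SKETCH_HANDLERS = {
    'hg': 'hgsubrepo (placeholder)',
    'svn': 'svnsubrepo (placeholder)',
    'git': 'gitsubrepo (placeholder)',
}

def _sketch_split_source(source):
    """Split '[git]../bar-src' into ('git', '../bar-src'); default to 'hg'."""
    m = _re.match(r'^\[(\w+)\](.*)$', source)
    if m:
        return m.group(1), m.group(2)
    return 'hg', source

def _sketch_handler_for(source):
    kind, url = _sketch_split_source(source)
    try:
        return _SKETCH_HANDLERS[kind], url
    except KeyError:
        raise ValueError('unknown subrepo type %s' % kind)

# _sketch_handler_for('[git]../bar-src') -> ('gitsubrepo (placeholder)', '../bar-src')
# _sketch_handler_for('foo')             -> ('hgsubrepo (placeholder)', 'foo')
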
@@ -1,1003 +1,1035 @@
1 $ HGMERGE=true; export HGMERGE
1 $ HGMERGE=true; export HGMERGE
2
2
3 init
3 init
4
4
5 $ hg init repo
5 $ hg init repo
6 $ cd repo
6 $ cd repo
7
7
8 commit
8 commit
9
9
10 $ echo 'a' > a
10 $ echo 'a' > a
11 $ hg ci -A -m test -u nobody -d '1 0'
11 $ hg ci -A -m test -u nobody -d '1 0'
12 adding a
12 adding a
13
13
14 annotate -c
14 annotate -c
15
15
16 $ hg annotate -c a
16 $ hg annotate -c a
17 8435f90966e4: a
17 8435f90966e4: a
18
18
19 annotate -cl
19 annotate -cl
20
20
21 $ hg annotate -cl a
21 $ hg annotate -cl a
22 8435f90966e4:1: a
22 8435f90966e4:1: a
23
23
24 annotate -d
24 annotate -d
25
25
26 $ hg annotate -d a
26 $ hg annotate -d a
27 Thu Jan 01 00:00:01 1970 +0000: a
27 Thu Jan 01 00:00:01 1970 +0000: a
28
28
29 annotate -n
29 annotate -n
30
30
31 $ hg annotate -n a
31 $ hg annotate -n a
32 0: a
32 0: a
33
33
34 annotate -nl
34 annotate -nl
35
35
36 $ hg annotate -nl a
36 $ hg annotate -nl a
37 0:1: a
37 0:1: a
38
38
39 annotate -u
39 annotate -u
40
40
41 $ hg annotate -u a
41 $ hg annotate -u a
42 nobody: a
42 nobody: a
43
43
44 annotate -cdnu
44 annotate -cdnu
45
45
46 $ hg annotate -cdnu a
46 $ hg annotate -cdnu a
47 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
47 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a
48
48
49 annotate -cdnul
49 annotate -cdnul
50
50
51 $ hg annotate -cdnul a
51 $ hg annotate -cdnul a
52 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
52 nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a
53
53
54 annotate (JSON)
54 annotate (JSON)
55
55
56 $ hg annotate -Tjson a
56 $ hg annotate -Tjson a
57 [
57 [
58 {
58 {
59 "abspath": "a",
59 "abspath": "a",
60 "lines": [{"line": "a\n", "rev": 0}],
60 "lines": [{"line": "a\n", "rev": 0}],
61 "path": "a"
61 "path": "a"
62 }
62 }
63 ]
63 ]
64
64
65 $ hg annotate -Tjson -cdfnul a
65 $ hg annotate -Tjson -cdfnul a
66 [
66 [
67 {
67 {
68 "abspath": "a",
68 "abspath": "a",
69 "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
69 "lines": [{"date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody"}],
70 "path": "a"
70 "path": "a"
71 }
71 }
72 ]
72 ]
73
73
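As an illustrative aside (not part of the test run): the JSON form above is
straightforward to consume from a script.  A minimal sketch, assuming `hg` is
on PATH and the current directory is a repository like the one built here:

  import json
  import subprocess
  import sys

  out = subprocess.check_output(['hg', 'annotate', '-Tjson', '-cdfnul', 'a'])
  for entry in json.loads(out.decode('utf-8')):
      sys.stdout.write('== %s ==\n' % entry['abspath'])
      # binary files may omit the "lines" key entirely, so use .get()
      for line in entry.get('lines', []):
          sys.stdout.write('%s %s: %s' % (line['user'], line['rev'], line['line']))
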
74 $ cat <<EOF >>a
74 $ cat <<EOF >>a
75 > a
75 > a
76 > a
76 > a
77 > EOF
77 > EOF
78 $ hg ci -ma1 -d '1 0'
78 $ hg ci -ma1 -d '1 0'
79 $ hg cp a b
79 $ hg cp a b
80 $ hg ci -mb -d '1 0'
80 $ hg ci -mb -d '1 0'
81 $ cat <<EOF >> b
81 $ cat <<EOF >> b
82 > b4
82 > b4
83 > b5
83 > b5
84 > b6
84 > b6
85 > EOF
85 > EOF
86 $ hg ci -mb2 -d '2 0'
86 $ hg ci -mb2 -d '2 0'
87
87
88 annotate multiple files (JSON)
88 annotate multiple files (JSON)
89
89
90 $ hg annotate -Tjson a b
90 $ hg annotate -Tjson a b
91 [
91 [
92 {
92 {
93 "abspath": "a",
93 "abspath": "a",
94 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
94 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}],
95 "path": "a"
95 "path": "a"
96 },
96 },
97 {
97 {
98 "abspath": "b",
98 "abspath": "b",
99 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
99 "lines": [{"line": "a\n", "rev": 0}, {"line": "a\n", "rev": 1}, {"line": "a\n", "rev": 1}, {"line": "b4\n", "rev": 3}, {"line": "b5\n", "rev": 3}, {"line": "b6\n", "rev": 3}],
100 "path": "b"
100 "path": "b"
101 }
101 }
102 ]
102 ]
103
103
104 annotate multiple files (template)
104 annotate multiple files (template)
105
105
106 $ hg annotate -T'== {abspath} ==\n{lines % "{rev}: {line}"}' a b
106 $ hg annotate -T'== {abspath} ==\n{lines % "{rev}: {line}"}' a b
107 == a ==
107 == a ==
108 0: a
108 0: a
109 1: a
109 1: a
110 1: a
110 1: a
111 == b ==
111 == b ==
112 0: a
112 0: a
113 1: a
113 1: a
114 1: a
114 1: a
115 3: b4
115 3: b4
116 3: b5
116 3: b5
117 3: b6
117 3: b6
118
118
119 annotate -n b
119 annotate -n b
120
120
121 $ hg annotate -n b
121 $ hg annotate -n b
122 0: a
122 0: a
123 1: a
123 1: a
124 1: a
124 1: a
125 3: b4
125 3: b4
126 3: b5
126 3: b5
127 3: b6
127 3: b6
128
128
129 annotate --no-follow b
129 annotate --no-follow b
130
130
131 $ hg annotate --no-follow b
131 $ hg annotate --no-follow b
132 2: a
132 2: a
133 2: a
133 2: a
134 2: a
134 2: a
135 3: b4
135 3: b4
136 3: b5
136 3: b5
137 3: b6
137 3: b6
138
138
139 annotate -nl b
139 annotate -nl b
140
140
141 $ hg annotate -nl b
141 $ hg annotate -nl b
142 0:1: a
142 0:1: a
143 1:2: a
143 1:2: a
144 1:3: a
144 1:3: a
145 3:4: b4
145 3:4: b4
146 3:5: b5
146 3:5: b5
147 3:6: b6
147 3:6: b6
148
148
149 annotate -nf b
149 annotate -nf b
150
150
151 $ hg annotate -nf b
151 $ hg annotate -nf b
152 0 a: a
152 0 a: a
153 1 a: a
153 1 a: a
154 1 a: a
154 1 a: a
155 3 b: b4
155 3 b: b4
156 3 b: b5
156 3 b: b5
157 3 b: b6
157 3 b: b6
158
158
159 annotate -nlf b
159 annotate -nlf b
160
160
161 $ hg annotate -nlf b
161 $ hg annotate -nlf b
162 0 a:1: a
162 0 a:1: a
163 1 a:2: a
163 1 a:2: a
164 1 a:3: a
164 1 a:3: a
165 3 b:4: b4
165 3 b:4: b4
166 3 b:5: b5
166 3 b:5: b5
167 3 b:6: b6
167 3 b:6: b6
168
168
169 $ hg up -C 2
169 $ hg up -C 2
170 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 $ cat <<EOF >> b
171 $ cat <<EOF >> b
172 > b4
172 > b4
173 > c
173 > c
174 > b5
174 > b5
175 > EOF
175 > EOF
176 $ hg ci -mb2.1 -d '2 0'
176 $ hg ci -mb2.1 -d '2 0'
177 created new head
177 created new head
178 $ hg merge
178 $ hg merge
179 merging b
179 merging b
180 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
180 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
181 (branch merge, don't forget to commit)
181 (branch merge, don't forget to commit)
182 $ hg ci -mmergeb -d '3 0'
182 $ hg ci -mmergeb -d '3 0'
183
183
184 annotate after merge
184 annotate after merge
185
185
186 $ hg annotate -nf b
186 $ hg annotate -nf b
187 0 a: a
187 0 a: a
188 1 a: a
188 1 a: a
189 1 a: a
189 1 a: a
190 3 b: b4
190 3 b: b4
191 4 b: c
191 4 b: c
192 3 b: b5
192 3 b: b5
193
193
194 annotate after merge with -l
194 annotate after merge with -l
195
195
196 $ hg annotate -nlf b
196 $ hg annotate -nlf b
197 0 a:1: a
197 0 a:1: a
198 1 a:2: a
198 1 a:2: a
199 1 a:3: a
199 1 a:3: a
200 3 b:4: b4
200 3 b:4: b4
201 4 b:5: c
201 4 b:5: c
202 3 b:5: b5
202 3 b:5: b5
203
203
204 $ hg up -C 1
204 $ hg up -C 1
205 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
205 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
206 $ hg cp a b
206 $ hg cp a b
207 $ cat <<EOF > b
207 $ cat <<EOF > b
208 > a
208 > a
209 > z
209 > z
210 > a
210 > a
211 > EOF
211 > EOF
212 $ hg ci -mc -d '3 0'
212 $ hg ci -mc -d '3 0'
213 created new head
213 created new head
214 $ hg merge
214 $ hg merge
215 merging b
215 merging b
216 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
216 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
217 (branch merge, don't forget to commit)
217 (branch merge, don't forget to commit)
218 $ cat <<EOF >> b
218 $ cat <<EOF >> b
219 > b4
219 > b4
220 > c
220 > c
221 > b5
221 > b5
222 > EOF
222 > EOF
223 $ echo d >> b
223 $ echo d >> b
224 $ hg ci -mmerge2 -d '4 0'
224 $ hg ci -mmerge2 -d '4 0'
225
225
226 annotate after rename merge
226 annotate after rename merge
227
227
228 $ hg annotate -nf b
228 $ hg annotate -nf b
229 0 a: a
229 0 a: a
230 6 b: z
230 6 b: z
231 1 a: a
231 1 a: a
232 3 b: b4
232 3 b: b4
233 4 b: c
233 4 b: c
234 3 b: b5
234 3 b: b5
235 7 b: d
235 7 b: d
236
236
237 annotate after rename merge with -l
237 annotate after rename merge with -l
238
238
239 $ hg annotate -nlf b
239 $ hg annotate -nlf b
240 0 a:1: a
240 0 a:1: a
241 6 b:2: z
241 6 b:2: z
242 1 a:3: a
242 1 a:3: a
243 3 b:4: b4
243 3 b:4: b4
244 4 b:5: c
244 4 b:5: c
245 3 b:5: b5
245 3 b:5: b5
246 7 b:7: d
246 7 b:7: d
247
247
248 --skip nothing (should be the same as no --skip at all)
248 --skip nothing (should be the same as no --skip at all)
249
249
250 $ hg annotate -nlf b --skip '1::0'
250 $ hg annotate -nlf b --skip '1::0'
251 0 a:1: a
251 0 a:1: a
252 6 b:2: z
252 6 b:2: z
253 1 a:3: a
253 1 a:3: a
254 3 b:4: b4
254 3 b:4: b4
255 4 b:5: c
255 4 b:5: c
256 3 b:5: b5
256 3 b:5: b5
257 7 b:7: d
257 7 b:7: d
258
258
259 --skip a modified line. Note a slight behavior difference in pure - this is
259 --skip a modified line. Note a slight behavior difference in pure - this is
260 because the pure code comes up with slightly different deltas internally.
260 because the pure code comes up with slightly different deltas internally.
261
261
262 $ hg annotate -nlf b --skip 6
262 $ hg annotate -nlf b --skip 6
263 0 a:1: a
263 0 a:1: a
264 1 a:2* z (no-pure !)
264 1 a:2* z (no-pure !)
265 0 a:1* z (pure !)
265 0 a:1* z (pure !)
266 1 a:3: a
266 1 a:3: a
267 3 b:4: b4
267 3 b:4: b4
268 4 b:5: c
268 4 b:5: c
269 3 b:5: b5
269 3 b:5: b5
270 7 b:7: d
270 7 b:7: d
271
271
272 --skip added lines (and test multiple skip)
272 --skip added lines (and test multiple skip)
273
273
274 $ hg annotate -nlf b --skip 3
274 $ hg annotate -nlf b --skip 3
275 0 a:1: a
275 0 a:1: a
276 6 b:2: z
276 6 b:2: z
277 1 a:3: a
277 1 a:3: a
278 1 a:3* b4
278 1 a:3* b4
279 4 b:5: c
279 4 b:5: c
280 1 a:3* b5
280 1 a:3* b5
281 7 b:7: d
281 7 b:7: d
282
282
283 $ hg annotate -nlf b --skip 4
283 $ hg annotate -nlf b --skip 4
284 0 a:1: a
284 0 a:1: a
285 6 b:2: z
285 6 b:2: z
286 1 a:3: a
286 1 a:3: a
287 3 b:4: b4
287 3 b:4: b4
288 1 a:3* c
288 1 a:3* c
289 3 b:5: b5
289 3 b:5: b5
290 7 b:7: d
290 7 b:7: d
291
291
292 $ hg annotate -nlf b --skip 3 --skip 4
292 $ hg annotate -nlf b --skip 3 --skip 4
293 0 a:1: a
293 0 a:1: a
294 6 b:2: z
294 6 b:2: z
295 1 a:3: a
295 1 a:3: a
296 1 a:3* b4
296 1 a:3* b4
297 1 a:3* c
297 1 a:3* c
298 1 a:3* b5
298 1 a:3* b5
299 7 b:7: d
299 7 b:7: d
300
300
301 $ hg annotate -nlf b --skip 'merge()'
301 $ hg annotate -nlf b --skip 'merge()'
302 0 a:1: a
302 0 a:1: a
303 6 b:2: z
303 6 b:2: z
304 1 a:3: a
304 1 a:3: a
305 3 b:4: b4
305 3 b:4: b4
306 4 b:5: c
306 4 b:5: c
307 3 b:5: b5
307 3 b:5: b5
308 3 b:5* d
308 3 b:5* d
309
309
310 --skip everything -- use the revision the file was introduced in
310 --skip everything -- use the revision the file was introduced in
311
311
312 $ hg annotate -nlf b --skip 'all()'
312 $ hg annotate -nlf b --skip 'all()'
313 0 a:1: a
313 0 a:1: a
314 0 a:1* z
314 0 a:1* z
315 0 a:1* a
315 0 a:1* a
316 0 a:1* b4
316 0 a:1* b4
317 0 a:1* c
317 0 a:1* c
318 0 a:1* b5
318 0 a:1* b5
319 0 a:1* d
319 0 a:1* d
320
320
321 Issue2807: alignment of line numbers with -l
321 Issue2807: alignment of line numbers with -l
322
322
323 $ echo more >> b
323 $ echo more >> b
324 $ hg ci -mmore -d '5 0'
324 $ hg ci -mmore -d '5 0'
325 $ echo more >> b
325 $ echo more >> b
326 $ hg ci -mmore -d '6 0'
326 $ hg ci -mmore -d '6 0'
327 $ echo more >> b
327 $ echo more >> b
328 $ hg ci -mmore -d '7 0'
328 $ hg ci -mmore -d '7 0'
329 $ hg annotate -nlf b
329 $ hg annotate -nlf b
330 0 a: 1: a
330 0 a: 1: a
331 6 b: 2: z
331 6 b: 2: z
332 1 a: 3: a
332 1 a: 3: a
333 3 b: 4: b4
333 3 b: 4: b4
334 4 b: 5: c
334 4 b: 5: c
335 3 b: 5: b5
335 3 b: 5: b5
336 7 b: 7: d
336 7 b: 7: d
337 8 b: 8: more
337 8 b: 8: more
338 9 b: 9: more
338 9 b: 9: more
339 10 b:10: more
339 10 b:10: more
340
340
341 linkrev vs rev
341 linkrev vs rev
342
342
343 $ hg annotate -r tip -n a
343 $ hg annotate -r tip -n a
344 0: a
344 0: a
345 1: a
345 1: a
346 1: a
346 1: a
347
347
348 linkrev vs rev with -l
348 linkrev vs rev with -l
349
349
350 $ hg annotate -r tip -nl a
350 $ hg annotate -r tip -nl a
351 0:1: a
351 0:1: a
352 1:2: a
352 1:2: a
353 1:3: a
353 1:3: a
354
354
355 Issue589: "undelete" sequence leads to crash
355 Issue589: "undelete" sequence leads to crash
356
356
357 annotate was crashing when trying to --follow something
357 annotate was crashing when trying to --follow something
358
358
359 like A -> B -> A
359 like A -> B -> A
360
360
361 generate ABA rename configuration
361 generate ABA rename configuration
362
362
363 $ echo foo > foo
363 $ echo foo > foo
364 $ hg add foo
364 $ hg add foo
365 $ hg ci -m addfoo
365 $ hg ci -m addfoo
366 $ hg rename foo bar
366 $ hg rename foo bar
367 $ hg ci -m renamefoo
367 $ hg ci -m renamefoo
368 $ hg rename bar foo
368 $ hg rename bar foo
369 $ hg ci -m renamebar
369 $ hg ci -m renamebar
370
370
371 annotate after ABA with follow
371 annotate after ABA with follow
372
372
373 $ hg annotate --follow foo
373 $ hg annotate --follow foo
374 foo: foo
374 foo: foo
375
375
376 missing file
376 missing file
377
377
378 $ hg ann nosuchfile
378 $ hg ann nosuchfile
379 abort: nosuchfile: no such file in rev e9e6b4fa872f
379 abort: nosuchfile: no such file in rev e9e6b4fa872f
380 [255]
380 [255]
381
381
382 annotate file without '\n' on last line
382 annotate file without '\n' on last line
383
383
384 $ printf "" > c
384 $ printf "" > c
385 $ hg ci -A -m test -u nobody -d '1 0'
385 $ hg ci -A -m test -u nobody -d '1 0'
386 adding c
386 adding c
387 $ hg annotate c
387 $ hg annotate c
388 $ printf "a\nb" > c
388 $ printf "a\nb" > c
389 $ hg ci -m test
389 $ hg ci -m test
390 $ hg annotate c
390 $ hg annotate c
391 [0-9]+: a (re)
391 [0-9]+: a (re)
392 [0-9]+: b (re)
392 [0-9]+: b (re)
393
393
394 Issue3841: check annotation of the file of which filelog includes
394 Issue3841: check annotation of the file of which filelog includes
395 merging between the revision and its ancestor
395 merging between the revision and its ancestor
396
396
397 to reproduce the situation with recent Mercurial, this script uses (1)
397 to reproduce the situation with recent Mercurial, this script uses (1)
398 "hg debugsetparents" to merge without ancestor check by "hg merge",
398 "hg debugsetparents" to merge without ancestor check by "hg merge",
399 and (2) the extension to allow filelog merging between the revision
399 and (2) the extension to allow filelog merging between the revision
400 and its ancestor by overriding "repo._filecommit".
400 and its ancestor by overriding "repo._filecommit".
401
401
402 $ cat > ../legacyrepo.py <<EOF
402 $ cat > ../legacyrepo.py <<EOF
403 > from __future__ import absolute_import
403 > from __future__ import absolute_import
404 > from mercurial import error, node
404 > from mercurial import error, node
405 > def reposetup(ui, repo):
405 > def reposetup(ui, repo):
406 > class legacyrepo(repo.__class__):
406 > class legacyrepo(repo.__class__):
407 > def _filecommit(self, fctx, manifest1, manifest2,
407 > def _filecommit(self, fctx, manifest1, manifest2,
408 > linkrev, tr, changelist):
408 > linkrev, tr, changelist):
409 > fname = fctx.path()
409 > fname = fctx.path()
410 > text = fctx.data()
410 > text = fctx.data()
411 > flog = self.file(fname)
411 > flog = self.file(fname)
412 > fparent1 = manifest1.get(fname, node.nullid)
412 > fparent1 = manifest1.get(fname, node.nullid)
413 > fparent2 = manifest2.get(fname, node.nullid)
413 > fparent2 = manifest2.get(fname, node.nullid)
414 > meta = {}
414 > meta = {}
415 > copy = fctx.renamed()
415 > copy = fctx.renamed()
416 > if copy and copy[0] != fname:
416 > if copy and copy[0] != fname:
417 > raise error.Abort('copying is not supported')
417 > raise error.Abort('copying is not supported')
418 > if fparent2 != node.nullid:
418 > if fparent2 != node.nullid:
419 > changelist.append(fname)
419 > changelist.append(fname)
420 > return flog.add(text, meta, tr, linkrev,
420 > return flog.add(text, meta, tr, linkrev,
421 > fparent1, fparent2)
421 > fparent1, fparent2)
422 > raise error.Abort('only merging is supported')
422 > raise error.Abort('only merging is supported')
423 > repo.__class__ = legacyrepo
423 > repo.__class__ = legacyrepo
424 > EOF
424 > EOF
425
425
426 $ cat > baz <<EOF
426 $ cat > baz <<EOF
427 > 1
427 > 1
428 > 2
428 > 2
429 > 3
429 > 3
430 > 4
430 > 4
431 > 5
431 > 5
432 > EOF
432 > EOF
433 $ hg add baz
433 $ hg add baz
434 $ hg commit -m "baz:0"
434 $ hg commit -m "baz:0"
435
435
436 $ cat > baz <<EOF
436 $ cat > baz <<EOF
437 > 1 baz:1
437 > 1 baz:1
438 > 2
438 > 2
439 > 3
439 > 3
440 > 4
440 > 4
441 > 5
441 > 5
442 > EOF
442 > EOF
443 $ hg commit -m "baz:1"
443 $ hg commit -m "baz:1"
444
444
445 $ cat > baz <<EOF
445 $ cat > baz <<EOF
446 > 1 baz:1
446 > 1 baz:1
447 > 2 baz:2
447 > 2 baz:2
448 > 3
448 > 3
449 > 4
449 > 4
450 > 5
450 > 5
451 > EOF
451 > EOF
452 $ hg debugsetparents 17 17
452 $ hg debugsetparents 17 17
453 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
453 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2"
454 $ hg debugindexdot .hg/store/data/baz.i
454 $ hg debugindexdot .hg/store/data/baz.i
455 digraph G {
455 digraph G {
456 -1 -> 0
456 -1 -> 0
457 0 -> 1
457 0 -> 1
458 1 -> 2
458 1 -> 2
459 1 -> 2
459 1 -> 2
460 }
460 }
461 $ hg annotate baz
461 $ hg annotate baz
462 17: 1 baz:1
462 17: 1 baz:1
463 18: 2 baz:2
463 18: 2 baz:2
464 16: 3
464 16: 3
465 16: 4
465 16: 4
466 16: 5
466 16: 5
467
467
468 $ cat > baz <<EOF
468 $ cat > baz <<EOF
469 > 1 baz:1
469 > 1 baz:1
470 > 2 baz:2
470 > 2 baz:2
471 > 3 baz:3
471 > 3 baz:3
472 > 4
472 > 4
473 > 5
473 > 5
474 > EOF
474 > EOF
475 $ hg commit -m "baz:3"
475 $ hg commit -m "baz:3"
476
476
477 $ cat > baz <<EOF
477 $ cat > baz <<EOF
478 > 1 baz:1
478 > 1 baz:1
479 > 2 baz:2
479 > 2 baz:2
480 > 3 baz:3
480 > 3 baz:3
481 > 4 baz:4
481 > 4 baz:4
482 > 5
482 > 5
483 > EOF
483 > EOF
484 $ hg debugsetparents 19 18
484 $ hg debugsetparents 19 18
485 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
485 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4"
486 $ hg debugindexdot .hg/store/data/baz.i
486 $ hg debugindexdot .hg/store/data/baz.i
487 digraph G {
487 digraph G {
488 -1 -> 0
488 -1 -> 0
489 0 -> 1
489 0 -> 1
490 1 -> 2
490 1 -> 2
491 1 -> 2
491 1 -> 2
492 2 -> 3
492 2 -> 3
493 3 -> 4
493 3 -> 4
494 2 -> 4
494 2 -> 4
495 }
495 }
496 $ hg annotate baz
496 $ hg annotate baz
497 17: 1 baz:1
497 17: 1 baz:1
498 18: 2 baz:2
498 18: 2 baz:2
499 19: 3 baz:3
499 19: 3 baz:3
500 20: 4 baz:4
500 20: 4 baz:4
501 16: 5
501 16: 5
502
502
503 annotate clean file
503 annotate clean file
504
504
505 $ hg annotate -ncr "wdir()" foo
505 $ hg annotate -ncr "wdir()" foo
506 11 472b18db256d : foo
506 11 472b18db256d : foo
507
507
508 annotate modified file
508 annotate modified file
509
509
510 $ echo foofoo >> foo
510 $ echo foofoo >> foo
511 $ hg annotate -r "wdir()" foo
511 $ hg annotate -r "wdir()" foo
512 11 : foo
512 11 : foo
513 20+: foofoo
513 20+: foofoo
514
514
515 $ hg annotate -cr "wdir()" foo
515 $ hg annotate -cr "wdir()" foo
516 472b18db256d : foo
516 472b18db256d : foo
517 b6bedd5477e7+: foofoo
517 b6bedd5477e7+: foofoo
518
518
519 $ hg annotate -ncr "wdir()" foo
519 $ hg annotate -ncr "wdir()" foo
520 11 472b18db256d : foo
520 11 472b18db256d : foo
521 20 b6bedd5477e7+: foofoo
521 20 b6bedd5477e7+: foofoo
522
522
523 $ hg annotate --debug -ncr "wdir()" foo
523 $ hg annotate --debug -ncr "wdir()" foo
524 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
524 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo
525 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
525 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo
526
526
527 $ hg annotate -udr "wdir()" foo
527 $ hg annotate -udr "wdir()" foo
528 test Thu Jan 01 00:00:00 1970 +0000: foo
528 test Thu Jan 01 00:00:00 1970 +0000: foo
529 test [A-Za-z0-9:+ ]+: foofoo (re)
529 test [A-Za-z0-9:+ ]+: foofoo (re)
530
530
531 $ hg annotate -ncr "wdir()" -Tjson foo
531 $ hg annotate -ncr "wdir()" -Tjson foo
532 [
532 [
533 {
533 {
534 "abspath": "foo",
534 "abspath": "foo",
535 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
535 "lines": [{"line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11}, {"line": "foofoo\n", "node": null, "rev": null}],
536 "path": "foo"
536 "path": "foo"
537 }
537 }
538 ]
538 ]
539
539
540 annotate added file
540 annotate added file
541
541
542 $ echo bar > bar
542 $ echo bar > bar
543 $ hg add bar
543 $ hg add bar
544 $ hg annotate -ncr "wdir()" bar
544 $ hg annotate -ncr "wdir()" bar
545 20 b6bedd5477e7+: bar
545 20 b6bedd5477e7+: bar
546
546
547 annotate renamed file
547 annotate renamed file
548
548
549 $ hg rename foo renamefoo2
549 $ hg rename foo renamefoo2
550 $ hg annotate -ncr "wdir()" renamefoo2
550 $ hg annotate -ncr "wdir()" renamefoo2
551 11 472b18db256d : foo
551 11 472b18db256d : foo
552 20 b6bedd5477e7+: foofoo
552 20 b6bedd5477e7+: foofoo
553
553
554 annotate missing file
554 annotate missing file
555
555
556 $ rm baz
556 $ rm baz
557
557
558 $ hg annotate -ncr "wdir()" baz
558 $ hg annotate -ncr "wdir()" baz
559 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
559 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
560 abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
560 abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
561 [255]
561 [255]
562
562
563 annotate removed file
563 annotate removed file
564
564
565 $ hg rm baz
565 $ hg rm baz
566
566
567 $ hg annotate -ncr "wdir()" baz
567 $ hg annotate -ncr "wdir()" baz
568 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
568 abort: $TESTTMP\repo\baz: $ENOENT$ (windows !)
569 abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
569 abort: $ENOENT$: $TESTTMP/repo/baz (no-windows !)
570 [255]
570 [255]
571
571
572 $ hg revert --all --no-backup --quiet
572 $ hg revert --all --no-backup --quiet
573 $ hg id -n
573 $ hg id -n
574 20
574 20
575
575
576 Test followlines() revset; we usually check both followlines(pat, range) and
576 Test followlines() revset; we usually check both followlines(pat, range) and
577 followlines(pat, range, descend=True) to make sure both give the same result
577 followlines(pat, range, descend=True) to make sure both give the same result
578 when they should.
578 when they should.
579
579
580 $ echo a >> foo
580 $ echo a >> foo
581 $ hg ci -m 'foo: add a'
581 $ hg ci -m 'foo: add a'
582 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5)'
582 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5)'
583 16: baz:0
583 16: baz:0
584 19: baz:3
584 19: baz:3
585 20: baz:4
585 20: baz:4
586 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=20)'
586 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=20)'
587 16: baz:0
587 16: baz:0
588 19: baz:3
588 19: baz:3
589 20: baz:4
589 20: baz:4
590 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19)'
590 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19)'
591 16: baz:0
591 16: baz:0
592 19: baz:3
592 19: baz:3
593 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=True)'
593 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=True)'
594 19: baz:3
594 19: baz:3
595 20: baz:4
595 20: baz:4
596 $ printf "0\n0\n" | cat - baz > baz1
596 $ printf "0\n0\n" | cat - baz > baz1
597 $ mv baz1 baz
597 $ mv baz1 baz
598 $ hg ci -m 'added two lines with 0'
598 $ hg ci -m 'added two lines with 0'
599 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
599 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
600 16: baz:0
600 16: baz:0
601 19: baz:3
601 19: baz:3
602 20: baz:4
602 20: baz:4
603 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, descend=true, startrev=19)'
603 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, descend=true, startrev=19)'
604 19: baz:3
604 19: baz:3
605 20: baz:4
605 20: baz:4
606 $ echo 6 >> baz
606 $ echo 6 >> baz
607 $ hg ci -m 'added line 8'
607 $ hg ci -m 'added line 8'
608 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
608 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
609 16: baz:0
609 16: baz:0
610 19: baz:3
610 19: baz:3
611 20: baz:4
611 20: baz:4
612 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=1)'
612 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=19, descend=1)'
613 19: baz:3
613 19: baz:3
614 20: baz:4
614 20: baz:4
615 $ sed 's/3/3+/' baz > baz.new
615 $ sed 's/3/3+/' baz > baz.new
616 $ mv baz.new baz
616 $ mv baz.new baz
617 $ hg ci -m 'baz:3->3+'
617 $ hg ci -m 'baz:3->3+'
618 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, descend=0)'
618 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, descend=0)'
619 16: baz:0
619 16: baz:0
620 19: baz:3
620 19: baz:3
621 20: baz:4
621 20: baz:4
622 24: baz:3->3+
622 24: baz:3->3+
623 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=17, descend=True)'
623 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:5, startrev=17, descend=True)'
624 19: baz:3
624 19: baz:3
625 20: baz:4
625 20: baz:4
626 24: baz:3->3+
626 24: baz:3->3+
627 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 1:2, descend=false)'
627 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 1:2, descend=false)'
628 22: added two lines with 0
628 22: added two lines with 0
629
629
630 file patterns are okay
630 file patterns are okay
631 $ hg log -T '{rev}: {desc}\n' -r 'followlines("path:baz", 1:2)'
631 $ hg log -T '{rev}: {desc}\n' -r 'followlines("path:baz", 1:2)'
632 22: added two lines with 0
632 22: added two lines with 0
633
633
634 renames are followed
634 renames are followed
635 $ hg mv baz qux
635 $ hg mv baz qux
636 $ sed 's/4/4+/' qux > qux.new
636 $ sed 's/4/4+/' qux > qux.new
637 $ mv qux.new qux
637 $ mv qux.new qux
638 $ hg ci -m 'qux:4->4+'
638 $ hg ci -m 'qux:4->4+'
639 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
639 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
640 16: baz:0
640 16: baz:0
641 19: baz:3
641 19: baz:3
642 20: baz:4
642 20: baz:4
643 24: baz:3->3+
643 24: baz:3->3+
644 25: qux:4->4+
644 25: qux:4->4+
645
645
646 but are missed when following children
646 but are missed when following children
647 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=22, descend=True)'
647 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=22, descend=True)'
648 24: baz:3->3+
648 24: baz:3->3+
649
649
650 merge
650 merge
651 $ hg up 24 --quiet
651 $ hg up 24 --quiet
652 $ echo 7 >> baz
652 $ echo 7 >> baz
653 $ hg ci -m 'one more line, out of line range'
653 $ hg ci -m 'one more line, out of line range'
654 created new head
654 created new head
655 $ sed 's/3+/3-/' baz > baz.new
655 $ sed 's/3+/3-/' baz > baz.new
656 $ mv baz.new baz
656 $ mv baz.new baz
657 $ hg ci -m 'baz:3+->3-'
657 $ hg ci -m 'baz:3+->3-'
658 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
658 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7)'
659 16: baz:0
659 16: baz:0
660 19: baz:3
660 19: baz:3
661 20: baz:4
661 20: baz:4
662 24: baz:3->3+
662 24: baz:3->3+
663 27: baz:3+->3-
663 27: baz:3+->3-
664 $ hg merge 25
664 $ hg merge 25
665 merging baz and qux to qux
665 merging baz and qux to qux
666 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
666 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
667 (branch merge, don't forget to commit)
667 (branch merge, don't forget to commit)
668 $ hg ci -m merge
668 $ hg ci -m merge
669 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
669 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
670 16: baz:0
670 16: baz:0
671 19: baz:3
671 19: baz:3
672 20: baz:4
672 20: baz:4
673 24: baz:3->3+
673 24: baz:3->3+
674 25: qux:4->4+
674 25: qux:4->4+
675 27: baz:3+->3-
675 27: baz:3+->3-
676 28: merge
676 28: merge
677 $ hg up 25 --quiet
677 $ hg up 25 --quiet
678 $ hg merge 27
678 $ hg merge 27
679 merging qux and baz to qux
679 merging qux and baz to qux
680 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
680 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
681 (branch merge, don't forget to commit)
681 (branch merge, don't forget to commit)
682 $ hg ci -m 'merge from other side'
682 $ hg ci -m 'merge from other side'
683 created new head
683 created new head
684 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
684 $ hg log -T '{rev}: {desc}\n' -r 'followlines(qux, 5:7)'
685 16: baz:0
685 16: baz:0
686 19: baz:3
686 19: baz:3
687 20: baz:4
687 20: baz:4
688 24: baz:3->3+
688 24: baz:3->3+
689 25: qux:4->4+
689 25: qux:4->4+
690 27: baz:3+->3-
690 27: baz:3+->3-
691 29: merge from other side
691 29: merge from other side
692 $ hg up 24 --quiet
692 $ hg up 24 --quiet
693
693
694 we are missing the branch with rename when following children
694 we are missing the branch with rename when following children
695 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=26, descend=True)'
695 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 5:7, startrev=26, descend=True)'
696 27: baz:3+->3-
696 27: baz:3+->3-
697
697
698 we follow all branches in descending direction
698 we follow all branches in descending direction
699 $ hg up 23 --quiet
699 $ hg up 23 --quiet
700 $ sed 's/3/+3/' baz > baz.new
700 $ sed 's/3/+3/' baz > baz.new
701 $ mv baz.new baz
701 $ mv baz.new baz
702 $ hg ci -m 'baz:3->+3'
702 $ hg ci -m 'baz:3->+3'
703 created new head
703 created new head
704 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 2:5, startrev=16, descend=True)' --graph
704 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 2:5, startrev=16, descend=True)' --graph
705 @ 30: baz:3->+3
705 @ 30: baz:3->+3
706 :
706 :
707 : o 27: baz:3+->3-
707 : o 27: baz:3+->3-
708 : :
708 : :
709 : o 24: baz:3->3+
709 : o 24: baz:3->3+
710 :/
710 :/
711 o 20: baz:4
711 o 20: baz:4
712 |\
712 |\
713 | o 19: baz:3
713 | o 19: baz:3
714 |/
714 |/
715 o 18: baz:2
715 o 18: baz:2
716 :
716 :
717 o 16: baz:0
717 o 16: baz:0
718 |
718 |
719 ~
719 ~
720
720
721 Issue5595: on a merge changeset with different line ranges depending on
721 Issue5595: on a merge changeset with different line ranges depending on
722 parent, be conservative and use the surrounding interval to avoid losing
722 parent, be conservative and use the surrounding interval to avoid losing
723 track of possible further descendants in specified range.
723 track of possible further descendants in specified range.
724
724
725 $ hg up 23 --quiet
725 $ hg up 23 --quiet
726 $ hg cat baz -r 24
726 $ hg cat baz -r 24
727 0
727 0
728 0
728 0
729 1 baz:1
729 1 baz:1
730 2 baz:2
730 2 baz:2
731 3+ baz:3
731 3+ baz:3
732 4 baz:4
732 4 baz:4
733 5
733 5
734 6
734 6
735 $ cat > baz << EOF
735 $ cat > baz << EOF
736 > 0
736 > 0
737 > 0
737 > 0
738 > a
738 > a
739 > b
739 > b
740 > 3+ baz:3
740 > 3+ baz:3
741 > 4 baz:4
741 > 4 baz:4
742 > y
742 > y
743 > z
743 > z
744 > EOF
744 > EOF
745 $ hg ci -m 'baz: mostly rewrite with some content from 24'
745 $ hg ci -m 'baz: mostly rewrite with some content from 24'
746 created new head
746 created new head
747 $ hg merge --tool :merge-other 24
747 $ hg merge --tool :merge-other 24
748 merging baz
748 merging baz
749 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
749 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
750 (branch merge, don't forget to commit)
750 (branch merge, don't forget to commit)
751 $ hg ci -m 'merge forgetting about baz rewrite'
751 $ hg ci -m 'merge forgetting about baz rewrite'
752 $ cat > baz << EOF
752 $ cat > baz << EOF
753 > 0
753 > 0
754 > 0
754 > 0
755 > 1 baz:1
755 > 1 baz:1
756 > 2+ baz:2
756 > 2+ baz:2
757 > 3+ baz:3
757 > 3+ baz:3
758 > 4 baz:4
758 > 4 baz:4
759 > 5
759 > 5
760 > 6
760 > 6
761 > EOF
761 > EOF
762 $ hg ci -m 'baz: narrow change (2->2+)'
762 $ hg ci -m 'baz: narrow change (2->2+)'
763 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:4, startrev=20, descend=True)' --graph
763 $ hg log -T '{rev}: {desc}\n' -r 'followlines(baz, 3:4, startrev=20, descend=True)' --graph
764 @ 33: baz: narrow change (2->2+)
764 @ 33: baz: narrow change (2->2+)
765 |
765 |
766 o 32: merge forgetting about baz rewrite
766 o 32: merge forgetting about baz rewrite
767 |\
767 |\
768 | o 31: baz: mostly rewrite with some content from 24
768 | o 31: baz: mostly rewrite with some content from 24
769 | :
769 | :
770 | : o 30: baz:3->+3
770 | : o 30: baz:3->+3
771 | :/
771 | :/
772 +---o 27: baz:3+->3-
772 +---o 27: baz:3+->3-
773 | :
773 | :
774 o : 24: baz:3->3+
774 o : 24: baz:3->3+
775 :/
775 :/
776 o 20: baz:4
776 o 20: baz:4
777 |\
777 |\
778 ~ ~
778 ~ ~
779
779
780 check error cases
780 check error cases
781 $ hg up 24 --quiet
781 $ hg up 24 --quiet
782 $ hg log -r 'followlines()'
782 $ hg log -r 'followlines()'
783 hg: parse error: followlines takes at least 1 positional arguments
783 hg: parse error: followlines takes at least 1 positional arguments
784 [255]
784 [255]
785 $ hg log -r 'followlines(baz)'
785 $ hg log -r 'followlines(baz)'
786 hg: parse error: followlines requires a line range
786 hg: parse error: followlines requires a line range
787 [255]
787 [255]
788 $ hg log -r 'followlines(baz, 1)'
788 $ hg log -r 'followlines(baz, 1)'
789 hg: parse error: followlines expects a line range
789 hg: parse error: followlines expects a line range
790 [255]
790 [255]
791 $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))'
791 $ hg log -r 'followlines(baz, 1:2, startrev=desc("b"))'
792 hg: parse error: followlines expects exactly one revision
792 hg: parse error: followlines expects exactly one revision
793 [255]
793 [255]
794 $ hg log -r 'followlines("glob:*", 1:2)'
794 $ hg log -r 'followlines("glob:*", 1:2)'
795 hg: parse error: followlines expects exactly one file
795 hg: parse error: followlines expects exactly one file
796 [255]
796 [255]
797 $ hg log -r 'followlines(baz, 1:)'
797 $ hg log -r 'followlines(baz, 1:)'
798 hg: parse error: line range bounds must be integers
798 hg: parse error: line range bounds must be integers
799 [255]
799 [255]
800 $ hg log -r 'followlines(baz, :1)'
800 $ hg log -r 'followlines(baz, :1)'
801 hg: parse error: line range bounds must be integers
801 hg: parse error: line range bounds must be integers
802 [255]
802 [255]
803 $ hg log -r 'followlines(baz, x:4)'
803 $ hg log -r 'followlines(baz, x:4)'
804 hg: parse error: line range bounds must be integers
804 hg: parse error: line range bounds must be integers
805 [255]
805 [255]
806 $ hg log -r 'followlines(baz, 5:4)'
806 $ hg log -r 'followlines(baz, 5:4)'
807 hg: parse error: line range must be positive
807 hg: parse error: line range must be positive
808 [255]
808 [255]
809 $ hg log -r 'followlines(baz, 0:4)'
809 $ hg log -r 'followlines(baz, 0:4)'
810 hg: parse error: fromline must be strictly positive
810 hg: parse error: fromline must be strictly positive
811 [255]
811 [255]
812 $ hg log -r 'followlines(baz, 2:40)'
812 $ hg log -r 'followlines(baz, 2:40)'
813 abort: line range exceeds file size
813 abort: line range exceeds file size
814 [255]
814 [255]
815 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
815 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=[1])'
816 hg: parse error at 43: not a prefix: [
816 hg: parse error at 43: not a prefix: [
817 (followlines(baz, 2:4, startrev=20, descend=[1])
817 (followlines(baz, 2:4, startrev=20, descend=[1])
818 ^ here)
818 ^ here)
819 [255]
819 [255]
820 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
820 $ hg log -r 'followlines(baz, 2:4, startrev=20, descend=a)'
821 hg: parse error: descend argument must be a boolean
821 hg: parse error: descend argument must be a boolean
822 [255]
822 [255]
823
823
824 Test empty annotate output
824 Test empty annotate output
825
825
826 $ printf '\0' > binary
826 $ printf '\0' > binary
827 $ touch empty
827 $ touch empty
828 $ hg ci -qAm 'add binary and empty files'
828 $ hg ci -qAm 'add binary and empty files'
829
829
830 $ hg annotate binary empty
830 $ hg annotate binary empty
831 binary: binary file
831 binary: binary file
832
832
833 $ hg annotate -Tjson binary empty
833 $ hg annotate -Tjson binary empty
834 [
834 [
835 {
835 {
836 "abspath": "binary",
836 "abspath": "binary",
837 "path": "binary"
837 "path": "binary"
838 },
838 },
839 {
839 {
840 "abspath": "empty",
840 "abspath": "empty",
841 "lines": [],
841 "lines": [],
842 "path": "empty"
842 "path": "empty"
843 }
843 }
844 ]
844 ]
845
845
846 Test annotate with whitespace options
846 Test annotate with whitespace options
847
847
848 $ cd ..
848 $ cd ..
849 $ hg init repo-ws
849 $ hg init repo-ws
850 $ cd repo-ws
850 $ cd repo-ws
851 $ cat > a <<EOF
851 $ cat > a <<EOF
852 > aa
852 > aa
853 >
853 >
854 > b b
854 > b b
855 > EOF
855 > EOF
856 $ hg ci -Am "adda"
856 $ hg ci -Am "adda"
857 adding a
857 adding a
858 $ sed 's/EOL$//g' > a <<EOF
858 $ sed 's/EOL$//g' > a <<EOF
859 > a a
859 > a a
860 >
860 >
861 > EOL
861 > EOL
862 > b b
862 > b b
863 > EOF
863 > EOF
864 $ hg ci -m "changea"
864 $ hg ci -m "changea"
865
865
866 Annotate with no option
866 Annotate with no option
867
867
868 $ hg annotate a
868 $ hg annotate a
869 1: a a
869 1: a a
870 0:
870 0:
871 1:
871 1:
872 1: b b
872 1: b b
873
873
874 Annotate with --ignore-space-change
874 Annotate with --ignore-space-change
875
875
876 $ hg annotate --ignore-space-change a
876 $ hg annotate --ignore-space-change a
877 1: a a
877 1: a a
878 1:
878 1:
879 0:
879 0:
880 0: b b
880 0: b b
881
881
882 Annotate with --ignore-all-space
882 Annotate with --ignore-all-space
883
883
884 $ hg annotate --ignore-all-space a
884 $ hg annotate --ignore-all-space a
885 0: a a
885 0: a a
886 0:
886 0:
887 1:
887 1:
888 0: b b
888 0: b b
889
889
890 Annotate with --ignore-blank-lines (similar to no options case)
890 Annotate with --ignore-blank-lines (similar to no options case)
891
891
892 $ hg annotate --ignore-blank-lines a
892 $ hg annotate --ignore-blank-lines a
893 1: a a
893 1: a a
894 0:
894 0:
895 1:
895 1:
896 1: b b
896 1: b b
897
897
898 $ cd ..
898 $ cd ..
899
899
900 Annotate with orphaned CR (issue5798)
901 -------------------------------------
902
903 $ hg init repo-cr
904 $ cd repo-cr
905
906 $ substcr() {
907 > sed 's/\r/[CR]/g'
908 > }
909
910 >>> with open('a', 'wb') as f:
911 ... f.write(b'0a\r0b\r\n0c\r0d\r\n0e\n0f\n0g')
912 $ hg ci -qAm0
913 >>> with open('a', 'wb') as f:
914 ... f.write(b'0a\r0b\r\n1c\r1d\r\n0e\n1f\n0g')
915 $ hg ci -m1
916
917 $ hg annotate -r0 a | substcr
918 0: 0a[CR]0b[CR]
919 0: 0c[CR]0d[CR]
920 0: 0e
921 0: 0f
922 0: 0g
923 $ hg annotate -r1 a | substcr
924 0: 0a[CR]0b[CR]
925 1: 1c[CR]1d[CR]
926 0: 0e
927 1: 1f
928 0: 0g
929
930 $ cd ..
931
900 Annotate with linkrev pointing to another branch
932 Annotate with linkrev pointing to another branch
901 ------------------------------------------------
933 ------------------------------------------------
902
934
903 create history with a filerev whose linkrev points to another branch
935 create history with a filerev whose linkrev points to another branch
904
936
905 $ hg init branchedlinkrev
937 $ hg init branchedlinkrev
906 $ cd branchedlinkrev
938 $ cd branchedlinkrev
907 $ echo A > a
939 $ echo A > a
908 $ hg commit -Am 'contentA'
940 $ hg commit -Am 'contentA'
909 adding a
941 adding a
910 $ echo B >> a
942 $ echo B >> a
911 $ hg commit -m 'contentB'
943 $ hg commit -m 'contentB'
912 $ hg up --rev 'desc(contentA)'
944 $ hg up --rev 'desc(contentA)'
913 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
945 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
914 $ echo unrelated > unrelated
946 $ echo unrelated > unrelated
915 $ hg commit -Am 'unrelated'
947 $ hg commit -Am 'unrelated'
916 adding unrelated
948 adding unrelated
917 created new head
949 created new head
918 $ hg graft -r 'desc(contentB)'
950 $ hg graft -r 'desc(contentB)'
919 grafting 1:fd27c222e3e6 "contentB"
951 grafting 1:fd27c222e3e6 "contentB"
920 $ echo C >> a
952 $ echo C >> a
921 $ hg commit -m 'contentC'
953 $ hg commit -m 'contentC'
922 $ echo W >> a
954 $ echo W >> a
923 $ hg log -G
955 $ hg log -G
924 @ changeset: 4:072f1e8df249
956 @ changeset: 4:072f1e8df249
925 | tag: tip
957 | tag: tip
926 | user: test
958 | user: test
927 | date: Thu Jan 01 00:00:00 1970 +0000
959 | date: Thu Jan 01 00:00:00 1970 +0000
928 | summary: contentC
960 | summary: contentC
929 |
961 |
930 o changeset: 3:ff38df03cc4b
962 o changeset: 3:ff38df03cc4b
931 | user: test
963 | user: test
932 | date: Thu Jan 01 00:00:00 1970 +0000
964 | date: Thu Jan 01 00:00:00 1970 +0000
933 | summary: contentB
965 | summary: contentB
934 |
966 |
935 o changeset: 2:62aaf3f6fc06
967 o changeset: 2:62aaf3f6fc06
936 | parent: 0:f0932f74827e
968 | parent: 0:f0932f74827e
937 | user: test
969 | user: test
938 | date: Thu Jan 01 00:00:00 1970 +0000
970 | date: Thu Jan 01 00:00:00 1970 +0000
939 | summary: unrelated
971 | summary: unrelated
940 |
972 |
941 | o changeset: 1:fd27c222e3e6
973 | o changeset: 1:fd27c222e3e6
942 |/ user: test
974 |/ user: test
943 | date: Thu Jan 01 00:00:00 1970 +0000
975 | date: Thu Jan 01 00:00:00 1970 +0000
944 | summary: contentB
976 | summary: contentB
945 |
977 |
946 o changeset: 0:f0932f74827e
978 o changeset: 0:f0932f74827e
947 user: test
979 user: test
948 date: Thu Jan 01 00:00:00 1970 +0000
980 date: Thu Jan 01 00:00:00 1970 +0000
949 summary: contentA
981 summary: contentA
950
982
951
983
952 Annotate should list ancestor of starting revision only
984 Annotate should list ancestor of starting revision only
953
985
954 $ hg annotate a
986 $ hg annotate a
955 0: A
987 0: A
956 3: B
988 3: B
957 4: C
989 4: C
958
990
959 $ hg annotate a -r 'wdir()'
991 $ hg annotate a -r 'wdir()'
960 0 : A
992 0 : A
961 3 : B
993 3 : B
962 4 : C
994 4 : C
963 4+: W
995 4+: W
964
996
965 Even when the starting revision is the linkrev-shadowed one:
997 Even when the starting revision is the linkrev-shadowed one:
966
998
967 $ hg annotate a -r 3
999 $ hg annotate a -r 3
968 0: A
1000 0: A
969 3: B
1001 3: B
970
1002
971 $ cd ..
1003 $ cd ..
972
1004
973 Issue5360: Deleted chunk in p1 of a merge changeset
1005 Issue5360: Deleted chunk in p1 of a merge changeset
974
1006
975 $ hg init repo-5360
1007 $ hg init repo-5360
976 $ cd repo-5360
1008 $ cd repo-5360
977 $ echo 1 > a
1009 $ echo 1 > a
978 $ hg commit -A a -m 1
1010 $ hg commit -A a -m 1
979 $ echo 2 >> a
1011 $ echo 2 >> a
980 $ hg commit -m 2
1012 $ hg commit -m 2
981 $ echo a > a
1013 $ echo a > a
982 $ hg commit -m a
1014 $ hg commit -m a
983 $ hg update '.^' -q
1015 $ hg update '.^' -q
984 $ echo 3 >> a
1016 $ echo 3 >> a
985 $ hg commit -m 3 -q
1017 $ hg commit -m 3 -q
986 $ hg merge 2 -q
1018 $ hg merge 2 -q
987 $ cat > a << EOF
1019 $ cat > a << EOF
988 > b
1020 > b
989 > 1
1021 > 1
990 > 2
1022 > 2
991 > 3
1023 > 3
992 > a
1024 > a
993 > EOF
1025 > EOF
994 $ hg resolve --mark -q
1026 $ hg resolve --mark -q
995 $ hg commit -m m
1027 $ hg commit -m m
996 $ hg annotate a
1028 $ hg annotate a
997 4: b
1029 4: b
998 0: 1
1030 0: 1
999 1: 2
1031 1: 2
1000 3: 3
1032 3: 3
1001 2: a
1033 2: a
1002
1034
1003 $ cd ..
1035 $ cd ..
@@ -1,622 +1,696 @@
1 Create test repository:
1 Create test repository:
2
2
3 $ hg init repo
3 $ hg init repo
4 $ cd repo
4 $ cd repo
5 $ echo x1 > x.txt
5 $ echo x1 > x.txt
6
6
7 $ hg init foo
7 $ hg init foo
8 $ cd foo
8 $ cd foo
9 $ echo y1 > y.txt
9 $ echo y1 > y.txt
10
10
11 $ hg init bar
11 $ hg init bar
12 $ cd bar
12 $ cd bar
13 $ echo z1 > z.txt
13 $ echo z1 > z.txt
14
14
15 $ cd ..
15 $ cd ..
16 $ echo 'bar = bar' > .hgsub
16 $ echo 'bar = bar' > .hgsub
17
17
18 $ cd ..
18 $ cd ..
19 $ echo 'foo = foo' > .hgsub
19 $ echo 'foo = foo' > .hgsub
20
20
21 Add files --- .hgsub files must go first to trigger subrepos:
21 Add files --- .hgsub files must go first to trigger subrepos:
22
22
23 $ hg add -S .hgsub
23 $ hg add -S .hgsub
24 $ hg add -S foo/.hgsub
24 $ hg add -S foo/.hgsub
25 $ hg add -S foo/bar
25 $ hg add -S foo/bar
26 adding foo/bar/z.txt
26 adding foo/bar/z.txt
27 $ hg add -S
27 $ hg add -S
28 adding x.txt
28 adding x.txt
29 adding foo/y.txt
29 adding foo/y.txt
30
30
31 Test recursive status without committing anything:
31 Test recursive status without committing anything:
32
32
33 $ hg status -S
33 $ hg status -S
34 A .hgsub
34 A .hgsub
35 A foo/.hgsub
35 A foo/.hgsub
36 A foo/bar/z.txt
36 A foo/bar/z.txt
37 A foo/y.txt
37 A foo/y.txt
38 A x.txt
38 A x.txt
39
39
40 Test recursive diff without committing anything:
40 Test recursive diff without committing anything:
41
41
42 $ hg diff --nodates -S foo
42 $ hg diff --nodates -S foo
43 diff -r 000000000000 foo/.hgsub
43 diff -r 000000000000 foo/.hgsub
44 --- /dev/null
44 --- /dev/null
45 +++ b/foo/.hgsub
45 +++ b/foo/.hgsub
46 @@ -0,0 +1,1 @@
46 @@ -0,0 +1,1 @@
47 +bar = bar
47 +bar = bar
48 diff -r 000000000000 foo/y.txt
48 diff -r 000000000000 foo/y.txt
49 --- /dev/null
49 --- /dev/null
50 +++ b/foo/y.txt
50 +++ b/foo/y.txt
51 @@ -0,0 +1,1 @@
51 @@ -0,0 +1,1 @@
52 +y1
52 +y1
53 diff -r 000000000000 foo/bar/z.txt
53 diff -r 000000000000 foo/bar/z.txt
54 --- /dev/null
54 --- /dev/null
55 +++ b/foo/bar/z.txt
55 +++ b/foo/bar/z.txt
56 @@ -0,0 +1,1 @@
56 @@ -0,0 +1,1 @@
57 +z1
57 +z1
58
58
59 Commits:
59 Commits:
60
60
61 $ hg commit -m fails
61 $ hg commit -m fails
62 abort: uncommitted changes in subrepository "foo"
62 abort: uncommitted changes in subrepository "foo"
63 (use --subrepos for recursive commit)
63 (use --subrepos for recursive commit)
64 [255]
64 [255]
65
65
66 The --subrepos flag overwrites the config setting:
66 The --subrepos flag overwrites the config setting:
67
67
68 $ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos
68 $ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos
69 committing subrepository foo
69 committing subrepository foo
70 committing subrepository foo/bar
70 committing subrepository foo/bar
71
71
  $ cd foo
  $ echo y2 >> y.txt
  $ hg commit -m 0-1-0

  $ cd bar
  $ echo z2 >> z.txt
  $ hg commit -m 0-1-1

  $ cd ..
  $ hg commit -m 0-2-1

  $ cd ..
  $ hg commit -m 1-2-1

Change working directory:

  $ echo y3 >> foo/y.txt
  $ echo z3 >> foo/bar/z.txt
  $ hg status -S
  M foo/bar/z.txt
  M foo/y.txt
  $ hg diff --nodates -S
  diff -r d254738c5f5e foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,2 +1,3 @@
  y1
  y2
  +y3
  diff -r 9647f22de499 foo/bar/z.txt
  --- a/foo/bar/z.txt
  +++ b/foo/bar/z.txt
  @@ -1,2 +1,3 @@
  z1
  z2
  +z3

Status call crossing repository boundaries:

  $ hg status -S foo/bar/z.txt
  M foo/bar/z.txt
  $ hg status -S -I 'foo/?.txt'
  M foo/y.txt
  $ hg status -S -I '**/?.txt'
  M foo/bar/z.txt
  M foo/y.txt
  $ hg diff --nodates -S -I '**/?.txt'
  diff -r d254738c5f5e foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,2 +1,3 @@
  y1
  y2
  +y3
  diff -r 9647f22de499 foo/bar/z.txt
  --- a/foo/bar/z.txt
  +++ b/foo/bar/z.txt
  @@ -1,2 +1,3 @@
  z1
  z2
  +z3

Status from within a subdirectory:

  $ mkdir dir
  $ cd dir
  $ echo a1 > a.txt
  $ hg status -S
  M foo/bar/z.txt
  M foo/y.txt
  ? dir/a.txt
  $ hg diff --nodates -S
  diff -r d254738c5f5e foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,2 +1,3 @@
  y1
  y2
  +y3
  diff -r 9647f22de499 foo/bar/z.txt
  --- a/foo/bar/z.txt
  +++ b/foo/bar/z.txt
  @@ -1,2 +1,3 @@
  z1
  z2
  +z3

Status with relative path:

  $ hg status -S ..
  M ../foo/bar/z.txt
  M ../foo/y.txt
  ? a.txt

XXX: filtering lfilesrepo.status() in 3.3-rc causes these files to be listed as
added instead of modified.
  $ hg status -S .. --config extensions.largefiles=
  M ../foo/bar/z.txt
  M ../foo/y.txt
  ? a.txt

  $ hg diff --nodates -S ..
  diff -r d254738c5f5e foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,2 +1,3 @@
  y1
  y2
  +y3
  diff -r 9647f22de499 foo/bar/z.txt
  --- a/foo/bar/z.txt
  +++ b/foo/bar/z.txt
  @@ -1,2 +1,3 @@
  z1
  z2
  +z3
  $ cd ..

Cleanup and final commit:

  $ rm -r dir
  $ hg commit --subrepos -m 2-3-2
  committing subrepository foo
  committing subrepository foo/bar

Test explicit path commands within subrepos: add/forget
  $ echo z1 > foo/bar/z2.txt
  $ hg status -S
  ? foo/bar/z2.txt
  $ hg add foo/bar/z2.txt
  $ hg status -S
  A foo/bar/z2.txt
  $ hg forget foo/bar/z2.txt
  $ hg status -S
  ? foo/bar/z2.txt
  $ hg forget foo/bar/z2.txt
  not removing foo/bar/z2.txt: file is already untracked
  [1]
  $ hg status -S
  ? foo/bar/z2.txt
  $ rm foo/bar/z2.txt

Log with the relationships between repo and its subrepo:

  $ hg log --template '{rev}:{node|short} {desc}\n'
  2:1326fa26d0c0 2-3-2
  1:4b3c9ff4f66b 1-2-1
  0:23376cbba0d8 0-0-0

  $ hg -R foo log --template '{rev}:{node|short} {desc}\n'
  3:65903cebad86 2-3-2
  2:d254738c5f5e 0-2-1
  1:8629ce7dcc39 0-1-0
  0:af048e97ade2 0-0-0

  $ hg -R foo/bar log --template '{rev}:{node|short} {desc}\n'
  2:31ecbdafd357 2-3-2
  1:9647f22de499 0-1-1
  0:4904098473f9 0-0-0

Status between revisions:

  $ hg status -S
  $ hg status -S --rev 0:1
  M .hgsubstate
  M foo/.hgsubstate
  M foo/bar/z.txt
  M foo/y.txt
  $ hg diff --nodates -S -I '**/?.txt' --rev 0:1
  diff -r af048e97ade2 -r d254738c5f5e foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,1 +1,2 @@
  y1
  +y2
  diff -r 4904098473f9 -r 9647f22de499 foo/bar/z.txt
  --- a/foo/bar/z.txt
  +++ b/foo/bar/z.txt
  @@ -1,1 +1,2 @@
  z1
  +z2

#if serve
  $ cd ..
  $ hg serve -R repo --debug -S -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log
  adding = $TESTTMP/repo
  adding foo = $TESTTMP/repo/foo
  adding foo/bar = $TESTTMP/repo/foo/bar
  listening at http://*:$HGPORT/ (bound to *:$HGPORT) (glob) (?)
  adding = $TESTTMP/repo (?)
  adding foo = $TESTTMP/repo/foo (?)
  adding foo/bar = $TESTTMP/repo/foo/bar (?)
  $ cat hg1.pid >> $DAEMON_PIDS

  $ hg clone http://localhost:$HGPORT clone --config progress.disable=True
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 5 changes to 3 files
  new changesets 23376cbba0d8:1326fa26d0c0
  updating to branch default
  cloning subrepo foo from http://localhost:$HGPORT/foo
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 7 changes to 3 files
  new changesets af048e97ade2:65903cebad86
  cloning subrepo foo/bar from http://localhost:$HGPORT/foo/bar
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 3 changes to 1 files
  new changesets 4904098473f9:31ecbdafd357
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ cat clone/foo/bar/z.txt
  z1
  z2
  z3

Clone pooling from a remote URL will share the top level repo and the subrepos,
even if they are referenced by remote URL.

  $ hg --config extensions.share= --config share.pool=$TESTTMP/pool \
  > clone http://localhost:$HGPORT shared
  (sharing from new pooled repository 23376cbba0d87c15906bb3652584927c140907bf)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 5 changes to 3 files
  new changesets 23376cbba0d8:1326fa26d0c0
  searching for changes
  no changes found
  updating working directory
  cloning subrepo foo from http://localhost:$HGPORT/foo
  (sharing from new pooled repository af048e97ade2e236f754f05d07013e586af0f8bf)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 4 changesets with 7 changes to 3 files
  new changesets af048e97ade2:65903cebad86
  searching for changes
  no changes found
  cloning subrepo foo/bar from http://localhost:$HGPORT/foo/bar
  (sharing from new pooled repository 4904098473f96c900fec436dad267edd4da59fad)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 3 changesets with 3 changes to 1 files
  new changesets 4904098473f9:31ecbdafd357
  searching for changes
  no changes found
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

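(A hedged aside, not part of the recorded run: the pooled storage used above
can also be configured persistently instead of via --config; the pool path
below is illustrative.)

  [extensions]
  share =

  [share]
  pool = /path/to/share-pool
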
  $ cat access.log
  * "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
  * "GET /?cmd=batch HTTP/1.1" 200 - * (glob)
  * "GET /?cmd=getbundle HTTP/1.1" 200 - * (glob)
  * "GET /foo?cmd=capabilities HTTP/1.1" 200 - (glob)
  * "GET /foo?cmd=batch HTTP/1.1" 200 - * (glob)
  * "GET /foo?cmd=getbundle HTTP/1.1" 200 - * (glob)
  * "GET /foo/bar?cmd=capabilities HTTP/1.1" 200 - (glob)
  * "GET /foo/bar?cmd=batch HTTP/1.1" 200 - * (glob)
  * "GET /foo/bar?cmd=getbundle HTTP/1.1" 200 - * (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=0 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=1326fa26d0c00d2146c63b56bb6a45149d7325ac&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D1326fa26d0c00d2146c63b56bb6a45149d7325ac x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=1326fa26d0c00d2146c63b56bb6a45149d7325ac&heads=1326fa26d0c00d2146c63b56bb6a45149d7325ac&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=0 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=65903cebad86f1a84bd4f1134f62fa7dcb7a1c98&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D65903cebad86f1a84bd4f1134f62fa7dcb7a1c98 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=65903cebad86f1a84bd4f1134f62fa7dcb7a1c98&heads=65903cebad86f1a84bd4f1134f62fa7dcb7a1c98&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=0 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=capabilities HTTP/1.1" 200 - (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=1&common=0000000000000000000000000000000000000000&heads=31ecbdafd357f54b281c9bd1d681bb90de219e22&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D31ecbdafd357f54b281c9bd1d681bb90de219e22 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)
  $LOCALIP - - [$LOGDATE$] "GET /foo/bar?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bookmarks=1&$USUAL_BUNDLE_CAPS$&cg=0&common=31ecbdafd357f54b281c9bd1d681bb90de219e22&heads=31ecbdafd357f54b281c9bd1d681bb90de219e22&listkeys=bookmarks&phases=1 x-hgproto-1:0.1 0.2 comp=$USUAL_COMPRESSIONS$ (glob)

  $ killdaemons.py
  $ rm hg1.pid error.log access.log
  $ cd repo
#endif

Enable progress extension for archive tests:

  $ cp $HGRCPATH $HGRCPATH.no-progress
  $ cat >> $HGRCPATH <<EOF
  > [progress]
  > disable=False
  > assume-tty = 1
  > delay = 0
  > # set changedelay really large so we don't see nested topics
  > changedelay = 30000
  > format = topic bar number
  > refresh = 0
  > width = 60
  > EOF

Test archiving to a directory tree (the doubled lines in the output
only show up in the test output, not in real usage):

  $ hg archive --subrepos ../archive
  \r (no-eol) (esc)
  archiving [ ] 0/3\r (no-eol) (esc)
  archiving [=============> ] 1/3\r (no-eol) (esc)
  archiving [===========================> ] 2/3\r (no-eol) (esc)
  archiving [==========================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo) [ ] 0/3\r (no-eol) (esc)
  archiving (foo) [===========> ] 1/3\r (no-eol) (esc)
  archiving (foo) [=======================> ] 2/3\r (no-eol) (esc)
  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
  archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
  \r (no-eol) (esc)
  $ find ../archive | sort
  ../archive
  ../archive/.hg_archival.txt
  ../archive/.hgsub
  ../archive/.hgsubstate
  ../archive/foo
  ../archive/foo/.hgsub
  ../archive/foo/.hgsubstate
  ../archive/foo/bar
  ../archive/foo/bar/z.txt
  ../archive/foo/y.txt
  ../archive/x.txt

Test archiving to zip file (unzip output is unstable):

  $ hg archive --subrepos --prefix '.' ../archive.zip
  \r (no-eol) (esc)
  archiving [ ] 0/3\r (no-eol) (esc)
  archiving [=============> ] 1/3\r (no-eol) (esc)
  archiving [===========================> ] 2/3\r (no-eol) (esc)
  archiving [==========================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo) [ ] 0/3\r (no-eol) (esc)
  archiving (foo) [===========> ] 1/3\r (no-eol) (esc)
  archiving (foo) [=======================> ] 2/3\r (no-eol) (esc)
  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
  archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
  \r (no-eol) (esc)

(unzip date formatting is unstable; we do not care about it and glob it out)

  $ unzip -l ../archive.zip | grep -v -- ----- | egrep -v files$
  Archive: ../archive.zip
  Length [ ]* Date [ ]* Time [ ]* Name (re)
  172 [0-9:\- ]* .hg_archival.txt (re)
  10 [0-9:\- ]* .hgsub (re)
  45 [0-9:\- ]* .hgsubstate (re)
  3 [0-9:\- ]* x.txt (re)
  10 [0-9:\- ]* foo/.hgsub (re)
  45 [0-9:\- ]* foo/.hgsubstate (re)
  9 [0-9:\- ]* foo/y.txt (re)
  9 [0-9:\- ]* foo/bar/z.txt (re)

Test archiving a revision that references a subrepo that is not yet
cloned:

#if hardlink
  $ hg clone -U . ../empty
  \r (no-eol) (esc)
  linking [ <=> ] 1\r (no-eol) (esc)
  linking [ <=> ] 2\r (no-eol) (esc)
  linking [ <=> ] 3\r (no-eol) (esc)
  linking [ <=> ] 4\r (no-eol) (esc)
  linking [ <=> ] 5\r (no-eol) (esc)
  linking [ <=> ] 6\r (no-eol) (esc)
  linking [ <=> ] 7\r (no-eol) (esc)
  linking [ <=> ] 8\r (no-eol) (esc)
  \r (no-eol) (esc)
#else
  $ hg clone -U . ../empty
  \r (no-eol) (esc)
  linking [ <=> ] 1 (no-eol)
#endif

  $ cd ../empty
#if hardlink
  $ hg archive --subrepos -r tip --prefix './' ../archive.tar.gz
  \r (no-eol) (esc)
  archiving [ ] 0/3\r (no-eol) (esc)
  archiving [=============> ] 1/3\r (no-eol) (esc)
  archiving [===========================> ] 2/3\r (no-eol) (esc)
  archiving [==========================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  linking [ <=> ] 1\r (no-eol) (esc)
  linking [ <=> ] 2\r (no-eol) (esc)
  linking [ <=> ] 3\r (no-eol) (esc)
  linking [ <=> ] 4\r (no-eol) (esc)
  linking [ <=> ] 5\r (no-eol) (esc)
  linking [ <=> ] 6\r (no-eol) (esc)
  linking [ <=> ] 7\r (no-eol) (esc)
  linking [ <=> ] 8\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo) [ ] 0/3\r (no-eol) (esc)
  archiving (foo) [===========> ] 1/3\r (no-eol) (esc)
  archiving (foo) [=======================> ] 2/3\r (no-eol) (esc)
  archiving (foo) [====================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  linking [ <=> ] 1\r (no-eol) (esc)
  linking [ <=> ] 2\r (no-eol) (esc)
  linking [ <=> ] 3\r (no-eol) (esc)
  linking [ <=> ] 4\r (no-eol) (esc)
  linking [ <=> ] 5\r (no-eol) (esc)
  linking [ <=> ] 6\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  archiving (foo/bar) [ ] 0/1\r (no-eol) (esc)
  archiving (foo/bar) [================================>] 1/1\r (no-eol) (esc)
  \r (no-eol) (esc)
  cloning subrepo foo from $TESTTMP/repo/foo
  cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
#else
Note there's a slight output glitch on non-hardlink systems: the last
"linking" progress topic never gets closed, which slightly corrupts the output on that platform.
  $ hg archive --subrepos -r tip --prefix './' ../archive.tar.gz
  \r (no-eol) (esc)
  archiving [ ] 0/3\r (no-eol) (esc)
  archiving [=============> ] 1/3\r (no-eol) (esc)
  archiving [===========================> ] 2/3\r (no-eol) (esc)
  archiving [==========================================>] 3/3\r (no-eol) (esc)
  \r (no-eol) (esc)
  \r (no-eol) (esc)
  linking [ <=> ] 1\r (no-eol) (esc)
  cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
#endif

Archive + subrepos uses '/' for all component separators

  $ tar -tzf ../archive.tar.gz | sort
  .hg_archival.txt
  .hgsub
  .hgsubstate
  foo/.hgsub
  foo/.hgsubstate
  foo/bar/z.txt
  foo/y.txt
  x.txt

The newly cloned subrepos contain no working copy:

  $ hg -R foo summary
  parent: -1:000000000000 (no revision checked out)
  branch: default
  commit: (clean)
  update: 4 new changesets (update)

Sharing a local repo whose referenced subrepo is not available locally (i.e. it
was never updated from null) fails the same way a clone operation does.

  $ hg --config progress.disable=True clone -U ../empty ../empty2

  $ hg --config extensions.share= --config progress.disable=True \
  > share ../empty2 ../empty_share
  updating working directory
  abort: repository $TESTTMP/empty2/foo not found!
  [255]

  $ hg --config progress.disable=True clone ../empty2 ../empty_clone
  updating to branch default
  abort: repository $TESTTMP/empty2/foo not found!
  [255]

Disable progress extension and cleanup:

  $ mv $HGRCPATH.no-progress $HGRCPATH

Test archiving when there is a directory in the way for a subrepo
created by archive:

  $ hg clone -U . ../almost-empty
  $ cd ../almost-empty
  $ mkdir foo
  $ echo f > foo/f
  $ hg archive --subrepos -r tip archive
  cloning subrepo foo from $TESTTMP/empty/foo
  abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepository "foo")
  [255]

Clone and test outgoing:

  $ cd ..
  $ hg clone repo repo2
  updating to branch default
  cloning subrepo foo from $TESTTMP/repo/foo
  cloning subrepo foo/bar from $TESTTMP/repo/foo/bar
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ cd repo2
  $ hg outgoing -S
  comparing with $TESTTMP/repo
  searching for changes
  no changes found
  comparing with $TESTTMP/repo/foo
  searching for changes
  no changes found
  comparing with $TESTTMP/repo/foo/bar
  searching for changes
  no changes found
  [1]

Make nested change:

  $ echo y4 >> foo/y.txt
  $ hg diff --nodates -S
  diff -r 65903cebad86 foo/y.txt
  --- a/foo/y.txt
  +++ b/foo/y.txt
  @@ -1,3 +1,4 @@
  y1
  y2
  y3
  +y4
  $ hg commit --subrepos -m 3-4-2
  committing subrepository foo
  $ hg outgoing -S
  comparing with $TESTTMP/repo
  searching for changes
  changeset: 3:2655b8ecc4ee
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 3-4-2

  comparing with $TESTTMP/repo/foo
  searching for changes
  changeset: 4:e96193d6cb36
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 3-4-2

  comparing with $TESTTMP/repo/foo/bar
  searching for changes
  no changes found


Switch to original repo and setup default path:

  $ cd ../repo
  $ echo '[paths]' >> .hg/hgrc
  $ echo 'default = ../repo2' >> .hg/hgrc

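(A hedged aside, not part of the recorded run: the same default path could be
written into .hg/hgrc directly, and a separate push target can be declared as
well; the ../repo3 path below is purely illustrative.)

  [paths]
  default = ../repo2
  default-push = ../repo3
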
Test incoming:

  $ hg incoming -S
  comparing with $TESTTMP/repo2
  searching for changes
  changeset: 3:2655b8ecc4ee
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 3-4-2

  comparing with $TESTTMP/repo2/foo
  searching for changes
  changeset: 4:e96193d6cb36
  tag: tip
  user: test
  date: Thu Jan 01 00:00:00 1970 +0000
  summary: 3-4-2

  comparing with $TESTTMP/repo2/foo/bar
  searching for changes
  no changes found

  $ hg incoming -S --bundle incoming.hg
  abort: cannot combine --bundle and --subrepos
  [255]

Test missing subrepo:

  $ rm -r foo
  $ hg status -S
  warning: error "unknown revision '65903cebad86f1a84bd4f1134f62fa7dcb7a1c98'" in subrepository "foo"

Issue2619: IndexError: list index out of range on hg add with subrepos
The subrepo must sort after the explicit filename.

  $ cd ..
  $ hg init test
  $ cd test
  $ hg init x
  $ echo abc > abc.txt
  $ hg ci -Am "abc"
  adding abc.txt
  $ echo "x = x" >> .hgsub
  $ hg add .hgsub
  $ touch a x/a
  $ hg add a x/a

  $ hg ci -Sm "added x"
  committing subrepository x
  $ echo abc > x/a
  $ hg revert --rev '.^' "set:subrepo('glob:x*')"
  abort: subrepository 'x' does not exist in 25ac2c9b3180!
  [255]

  $ cd ..
@@ -1,109 +1,190 b''
#require killdaemons

Preparing the subrepository 'sub'

  $ hg init sub
  $ echo sub > sub/sub
  $ hg add -R sub
  adding sub/sub
  $ hg commit -R sub -m "sub import"

Preparing the 'main' repo which depends on the subrepo 'sub'

  $ hg init main
  $ echo main > main/main
  $ echo "sub = ../sub" > main/.hgsub
  $ hg clone sub main/sub
  updating to branch default
  1 files updated, 0 files merged, 0 files removed, 0 files unresolved
  $ hg add -R main
  adding main/.hgsub
  adding main/main
  $ hg commit -R main -m "main import"

Cleaning both repositories, just as a clone -U

  $ hg up -C -R sub null
  0 files updated, 0 files merged, 1 files removed, 0 files unresolved
  $ hg up -C -R main null
  0 files updated, 0 files merged, 3 files removed, 0 files unresolved
  $ rm -rf main/sub

hide outer repo
  $ hg init

Serving them both using hgweb

  $ printf '[paths]\n/main = main\nsub = sub\n' > webdir.conf
  $ hg serve --webdir-conf webdir.conf -a localhost -p $HGPORT \
  > -A /dev/null -E /dev/null --pid-file hg.pid -d
  $ cat hg.pid >> $DAEMON_PIDS

Clone main from hgweb

  $ hg clone "http://localhost:$HGPORT/main" cloned
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 3 changes to 3 files
  new changesets fdfeeb3e979e
  updating to branch default
  cloning subrepo sub from http://localhost:$HGPORT/sub
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

Checking cloned repo ids

  $ hg id -R cloned
  fdfeeb3e979e tip
  $ hg id -R cloned/sub
  863c1745b441 tip

subrepo debug for 'main' clone

  $ hg debugsub -R cloned
  path sub
  source ../sub
  revision 863c1745b441bd97a8c4a096e87793073f4fb215

Test sharing with a remote URL reference

  $ hg init absolute_subrepo
  $ cd absolute_subrepo
  $ echo foo > foo.txt
  $ hg ci -Am 'initial commit'
  adding foo.txt
  $ echo "sub = http://localhost:$HGPORT/sub" > .hgsub
  $ hg ci -Am 'add absolute subrepo'
  adding .hgsub
  $ cd ..

Clone pooling works for local clones with a remote subrepo reference. The
subrepo is cloned to the pool and shared from there, so that all clones will
share the same subrepo.

  $ hg --config extensions.share= --config share.pool=$TESTTMP/pool \
  > clone absolute_subrepo cloned_from_abs
  (sharing from new pooled repository 8d6a2f1e993b34b6557de0042cfe825ae12a8dae)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 2 changesets with 3 changes to 3 files
  new changesets 8d6a2f1e993b:* (glob)
  searching for changes
  no changes found
  updating working directory
  cloning subrepo sub from http://localhost:$HGPORT/sub
  (sharing from new pooled repository 863c1745b441bd97a8c4a096e87793073f4fb215)
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  searching for changes
  no changes found
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

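(A hedged aside, not part of the recorded run: with share.pool set, the pooled
backing repositories are keyed by the hash of their root changeset, so after
the clone above the pool directory would hold roughly the layout sketched
below, using the hashes reported in the output.)

  $TESTTMP/pool/
    8d6a2f1e993b34b6557de0042cfe825ae12a8dae/   (absolute_subrepo)
    863c1745b441bd97a8c4a096e87793073f4fb215/   (the shared 'sub' subrepo)
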
Vanilla sharing with a subrepo remote path reference will clone the subrepo.
Each share of these top level repos will end up with independent subrepo copies
(potentially leaving the shared parent with dangling cset references).

  $ hg --config extensions.share= share absolute_subrepo shared_from_abs
  updating working directory
  cloning subrepo sub from http://localhost:$HGPORT/sub
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg --config extensions.share= share -U absolute_subrepo shared_from_abs2
  $ hg -R shared_from_abs2 update -r tip
  cloning subrepo sub from http://localhost:$HGPORT/sub
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

A parent repo without its subrepo available locally can be shared if the
subrepo is referenced by absolute path.

  $ hg clone -U absolute_subrepo cloned_null_from_abs
  $ hg --config extensions.share= share cloned_null_from_abs shared_from_null_abs
  updating working directory
  cloning subrepo sub from http://localhost:$HGPORT/sub
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ killdaemons.py

subrepo paths with ssh urls

  $ hg clone -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/cloned sshclone
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 3 changes to 3 files
  new changesets fdfeeb3e979e
  updating to branch default
  cloning subrepo sub from ssh://user@dummy/sub
  requesting all changes
  adding changesets
  adding manifests
  adding file changes
  added 1 changesets with 1 changes to 1 files
  new changesets 863c1745b441
  3 files updated, 0 files merged, 0 files removed, 0 files unresolved

  $ hg -R sshclone push -e "\"$PYTHON\" \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/cloned
  pushing to ssh://user@dummy/$TESTTMP/cloned
  pushing subrepo sub to ssh://user@dummy/$TESTTMP/sub
  searching for changes
  no changes found
  searching for changes
  no changes found
  [1]

  $ cat dummylog
  Got arguments 1:user@dummy 2:hg -R cloned serve --stdio
  Got arguments 1:user@dummy 2:hg -R sub serve --stdio
  Got arguments 1:user@dummy 2:hg -R $TESTTMP/cloned serve --stdio
  Got arguments 1:user@dummy 2:hg -R $TESTTMP/sub serve --stdio