##// END OF EJS Templates
verify: don't init subrepo when missing one is referenced (issue5128) (API)...
Matt Harbison -
r29021:92d37fb3 stable
parent child Browse files
Show More
@@ -1,1980 +1,1980
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import os
11 import os
12 import re
12 import re
13 import stat
13 import stat
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import (
16 from .node import (
17 bin,
17 bin,
18 hex,
18 hex,
19 nullid,
19 nullid,
20 nullrev,
20 nullrev,
21 short,
21 short,
22 wdirid,
22 wdirid,
23 )
23 )
24 from . import (
24 from . import (
25 encoding,
25 encoding,
26 error,
26 error,
27 fileset,
27 fileset,
28 match as matchmod,
28 match as matchmod,
29 mdiff,
29 mdiff,
30 obsolete as obsmod,
30 obsolete as obsmod,
31 patch,
31 patch,
32 phases,
32 phases,
33 repoview,
33 repoview,
34 revlog,
34 revlog,
35 scmutil,
35 scmutil,
36 subrepo,
36 subrepo,
37 util,
37 util,
38 )
38 )
39
39
# Shorthand alias for the cached-property decorator used throughout this
# module.
propertycache = util.propertycache

# Phony node value to stand-in for new files in some uses of
# manifests. Manifests support 21-byte hashes for nodes which are
# dirty in the working copy.
_newnode = '!' * 21

# Matches any byte outside the printable-ASCII range 0x21-0x7f; used when
# deciding whether a raw binary changeid must be hex-encoded before being
# shown in an error message (see changectx.__init__).
nonascii = re.compile(r'[^\x21-\x7f]').search
48
48
class basectx(object):
    """A basectx object represents the common logic for its children:
    changectx: read-only context that is already present in the repo,
    workingctx: a context that represents the working directory and can
    be committed,
    memctx: a context that represents changes in-memory and can also
    be committed."""
    def __new__(cls, repo, changeid='', *args, **kwargs):
        # Passing an existing context through simply returns it unchanged,
        # so ctx-or-changeid arguments can be normalized cheaply.
        if isinstance(changeid, basectx):
            return changeid

        o = super(basectx, cls).__new__(cls)

        o._repo = repo
        o._rev = nullrev
        o._node = nullid

        return o

    def __str__(self):
        # Short (abbreviated) hex form of the node.
        return short(self.node())

    def __int__(self):
        return self.rev()

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __eq__(self, other):
        # Equal only when both type and revision match; objects without a
        # _rev attribute compare unequal rather than raising.
        try:
            return type(self) == type(other) and self._rev == other._rev
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __contains__(self, key):
        # Membership means "file tracked in this context's manifest".
        return key in self._manifest

    def __getitem__(self, key):
        # Indexing by path yields the file context for that path.
        return self.filectx(key)

    def __iter__(self):
        return iter(self._manifest)

    def _manifestmatches(self, match, s):
        """generate a new manifest filtered by the match argument

        This method is for internal use only and mainly exists to provide an
        object oriented way for other contexts to customize the manifest
        generation.
        """
        return self.manifest().matches(match)

    def _matchstatus(self, other, match):
        """return match.always if match is none

        This internal method provides a way for child objects to override the
        match operator.
        """
        return match or matchmod.always(self._repo.root, self._repo.getcwd())

    def _buildstatus(self, other, s, match, listignored, listclean,
                     listunknown):
        """build a status with respect to another context"""
        # Load earliest manifest first for caching reasons. More specifically,
        # if you have revisions 1000 and 1001, 1001 is probably stored as a
        # delta against 1000. Thus, if you read 1000 first, we'll reconstruct
        # 1000 and cache it so that when you read 1001, we just need to apply a
        # delta to what's in the cache. So that's one full reconstruction + one
        # delta application.
        if self.rev() is not None and self.rev() < other.rev():
            self.manifest()
        mf1 = other._manifestmatches(match, s)
        mf2 = self._manifestmatches(match, s)

        modified, added = [], []
        removed = []
        clean = []
        deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
        deletedset = set(deleted)
        d = mf1.diff(mf2, clean=listclean)
        for fn, value in d.iteritems():
            if fn in deletedset:
                # Deleted files are reported separately; skip them here.
                continue
            if value is None:
                # diff(clean=True) marks unchanged entries with None.
                clean.append(fn)
                continue
            (node1, flag1), (node2, flag2) = value
            if node1 is None:
                added.append(fn)
            elif node2 is None:
                removed.append(fn)
            elif flag1 != flag2:
                modified.append(fn)
            elif node2 != _newnode:
                # When comparing files between two commits, we save time by
                # not comparing the file contents when the nodeids differ.
                # Note that this means we incorrectly report a reverted change
                # to a file as a modification.
                modified.append(fn)
            elif self[fn].cmp(other[fn]):
                modified.append(fn)
            else:
                clean.append(fn)

        if removed:
            # need to filter files if they are already reported as removed
            unknown = [fn for fn in unknown if fn not in mf1]
            ignored = [fn for fn in ignored if fn not in mf1]
            # if they're deleted, don't report them as removed
            removed = [fn for fn in removed if fn not in deletedset]

        return scmutil.status(modified, added, removed, deleted, unknown,
                              ignored, clean)

    @propertycache
    def substate(self):
        # Parsed subrepository state (.hgsub/.hgsubstate) for this context.
        return subrepo.state(self, self._repo.ui)

    def subrev(self, subpath):
        # Revision recorded for the subrepo at subpath; raises KeyError if
        # the subrepo is not present in this context's substate.
        return self.substate[subpath][1]

    def rev(self):
        return self._rev
    def node(self):
        return self._node
    def hex(self):
        return hex(self.node())
    def manifest(self):
        return self._manifest
    def repo(self):
        return self._repo
    def phasestr(self):
        # Human-readable phase name ('public', 'draft', ...).
        return phases.phasenames[self.phase()]
    def mutable(self):
        # Anything above the public phase may still be rewritten.
        return self.phase() > phases.public

    def getfileset(self, expr):
        return fileset.getfileset(self, expr)

    def obsolete(self):
        """True if the changeset is obsolete"""
        return self.rev() in obsmod.getrevs(self._repo, 'obsolete')

    def extinct(self):
        """True if the changeset is extinct"""
        return self.rev() in obsmod.getrevs(self._repo, 'extinct')

    def unstable(self):
        """True if the changeset is not obsolete but its ancestors are"""
        return self.rev() in obsmod.getrevs(self._repo, 'unstable')

    def bumped(self):
        """True if the changeset tries to be a successor of a public changeset

        Only non-public and non-obsolete changesets may be bumped.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'bumped')

    def divergent(self):
        """Is a successor of a changeset with multiple possible successor sets

        Only non-public and non-obsolete changesets may be divergent.
        """
        return self.rev() in obsmod.getrevs(self._repo, 'divergent')

    def troubled(self):
        """True if the changeset is either unstable, bumped or divergent"""
        return self.unstable() or self.bumped() or self.divergent()

    def troubles(self):
        """return the list of troubles affecting this changeset.

        Troubles are returned as strings. possible values are:
        - unstable,
        - bumped,
        - divergent.
        """
        troubles = []
        if self.unstable():
            troubles.append('unstable')
        if self.bumped():
            troubles.append('bumped')
        if self.divergent():
            troubles.append('divergent')
        return troubles

    def parents(self):
        """return contexts for each parent changeset"""
        return self._parents

    def p1(self):
        return self._parents[0]

    def p2(self):
        # Second parent, or the null context when there is only one parent.
        parents = self._parents
        if len(parents) == 2:
            return parents[1]
        return changectx(self._repo, nullrev)

    def _fileinfo(self, path):
        # Resolve (filenode, flags) for path, preferring whichever manifest
        # representation is already cached to avoid a full manifest read.
        if '_manifest' in self.__dict__:
            try:
                return self._manifest[path], self._manifest.flags(path)
            except KeyError:
                raise error.ManifestLookupError(self._node, path,
                                                _('not found in manifest'))
        if '_manifestdelta' in self.__dict__ or path in self.files():
            if path in self._manifestdelta:
                return (self._manifestdelta[path],
                        self._manifestdelta.flags(path))
        node, flag = self._repo.manifest.find(self._changeset.manifest, path)
        if not node:
            raise error.ManifestLookupError(self._node, path,
                                            _('not found in manifest'))

        return node, flag

    def filenode(self, path):
        return self._fileinfo(path)[0]

    def flags(self, path):
        # Flags for a path; missing files yield '' rather than raising.
        try:
            return self._fileinfo(path)[1]
        except error.LookupError:
            return ''

    def sub(self, path, allowcreate=True):
        '''return a subrepo for the stored revision of path, never wdir()'''
        return subrepo.subrepo(self, path, allowcreate=allowcreate)

    def nullsub(self, path, pctx):
        return subrepo.nullsubrepo(self, path, pctx)

    def workingsub(self, path):
        '''return a subrepo for the stored revision, or wdir if this is a wdir
        context.
        '''
        return subrepo.subrepo(self, path, allowwdir=True)

    def match(self, pats=[], include=None, exclude=None, default='glob',
              listsubrepos=False, badfn=None):
        # Build a matcher rooted at the repo, bound to this context so
        # filesets and subrepo expansion resolve against it.
        r = self._repo
        return matchmod.match(r.root, r.getcwd(), pats,
                              include, exclude, default,
                              auditor=r.nofsauditor, ctx=self,
                              listsubrepos=listsubrepos, badfn=badfn)

    def diff(self, ctx2=None, match=None, **opts):
        """Returns a diff generator for the given contexts and matcher"""
        if ctx2 is None:
            ctx2 = self.p1()
        if ctx2 is not None:
            ctx2 = self._repo[ctx2]
        diffopts = patch.diffopts(self._repo.ui, opts)
        return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts)

    def dirs(self):
        return self._manifest.dirs()

    def hasdir(self, dir):
        return self._manifest.hasdir(dir)

    def dirty(self, missing=False, merge=True, branch=True):
        # Committed/read-only contexts are never dirty; workingctx overrides.
        return False

    def status(self, other=None, match=None, listignored=False,
               listclean=False, listunknown=False, listsubrepos=False):
        """return status of files between two nodes or node and working
        directory.

        If other is None, compare this node with working directory.

        returns (modified, added, removed, deleted, unknown, ignored, clean)
        """

        ctx1 = self
        ctx2 = self._repo[other]

        # This next code block is, admittedly, fragile logic that tests for
        # reversing the contexts and wouldn't need to exist if it weren't for
        # the fast (and common) code path of comparing the working directory
        # with its first parent.
        #
        # What we're aiming for here is the ability to call:
        #
        # workingctx.status(parentctx)
        #
        # If we always built the manifest for each context and compared those,
        # then we'd be done. But the special case of the above call means we
        # just copy the manifest of the parent.
        reversed = False
        if (not isinstance(ctx1, changectx)
            and isinstance(ctx2, changectx)):
            reversed = True
            ctx1, ctx2 = ctx2, ctx1

        match = ctx2._matchstatus(ctx1, match)
        r = scmutil.status([], [], [], [], [], [], [])
        r = ctx2._buildstatus(ctx1, r, match, listignored, listclean,
                              listunknown)

        if reversed:
            # Reverse added and removed. Clear deleted, unknown and ignored as
            # these make no sense to reverse.
            r = scmutil.status(r.modified, r.removed, r.added, [], [], [],
                               r.clean)

        if listsubrepos:
            for subpath, sub in scmutil.itersubrepos(ctx1, ctx2):
                try:
                    rev2 = ctx2.subrev(subpath)
                except KeyError:
                    # A subrepo that existed in node1 was deleted between
                    # node1 and node2 (inclusive). Thus, ctx2's substate
                    # won't contain that subpath. The best we can do is
                    # ignore it.
                    rev2 = None
                submatch = matchmod.subdirmatcher(subpath, match)
                s = sub.status(rev2, match=submatch, ignored=listignored,
                               clean=listclean, unknown=listunknown,
                               listsubrepos=True)
                for rfiles, sfiles in zip(r, s):
                    rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)

        for l in r:
            l.sort()

        return r
379
379
380
380
def makememctx(repo, parents, text, user, date, branch, files, store,
               editor=None, extra=None):
    """Build an in-memory changeset context whose file data comes from
    *store* (an object providing ``getfile(path)``)."""
    if extra is None:
        extra = {}
    if branch:
        extra['branch'] = encoding.fromlocal(branch)

    def getfilectx(repo, memctx, path):
        # Fetch the file payload from the backing store; a None payload
        # signals the file is absent in this context.
        data, mode, copied = store.getfile(path)
        if data is None:
            return None
        islink, isexec = mode
        return memfilectx(repo, path, data, islink=islink, isexec=isexec,
                          copied=copied, memctx=memctx)

    return memctx(repo, parents, text, files, getfilectx, user,
                  date, extra, editor)
397
397
398 class changectx(basectx):
398 class changectx(basectx):
399 """A changecontext object makes access to data related to a particular
399 """A changecontext object makes access to data related to a particular
400 changeset convenient. It represents a read-only context already present in
400 changeset convenient. It represents a read-only context already present in
401 the repo."""
401 the repo."""
    def __init__(self, repo, changeid=''):
        """changeid is a revision number, node, or tag

        Lookup is attempted in order: existing context, integer rev, the
        'null'/'tip'/'.' aliases, binary node, decimal rev string, hex node,
        the names interface (tags/bookmarks/branches), then a partial hex
        match.  Failure raises RepoLookupError (or a Filtered* variant when
        the revision exists but is hidden/filtered).
        """

        # since basectx.__new__ already took care of copying the object, we
        # don't need to do anything in __init__, so we just exit here
        if isinstance(changeid, basectx):
            return

        if changeid == '':
            changeid = '.'
        self._repo = repo

        try:
            if isinstance(changeid, int):
                self._node = repo.changelog.node(changeid)
                self._rev = changeid
                return
            if isinstance(changeid, long):
                # Python 2 long: normalize to the string path below.
                changeid = str(changeid)
            if changeid == 'null':
                self._node = nullid
                self._rev = nullrev
                return
            if changeid == 'tip':
                self._node = repo.changelog.tip()
                self._rev = repo.changelog.rev(self._node)
                return
            if changeid == '.' or changeid == repo.dirstate.p1():
                # this is a hack to delay/avoid loading obsmarkers
                # when we know that '.' won't be hidden
                self._node = repo.dirstate.p1()
                self._rev = repo.unfiltered().changelog.rev(self._node)
                return
            if len(changeid) == 20:
                # Looks like a binary node id.
                try:
                    self._node = changeid
                    self._rev = repo.changelog.rev(changeid)
                    return
                except error.FilteredRepoLookupError:
                    raise
                except LookupError:
                    pass

            try:
                # Decimal revision number as a string (negative counts
                # from the end of the changelog).
                r = int(changeid)
                if str(r) != changeid:
                    raise ValueError
                l = len(repo.changelog)
                if r < 0:
                    r += l
                if r < 0 or r >= l:
                    raise ValueError
                self._rev = r
                self._node = repo.changelog.node(r)
                return
            except error.FilteredIndexError:
                raise
            except (ValueError, OverflowError, IndexError):
                pass

            if len(changeid) == 40:
                # Full 40-character hex node id.
                try:
                    self._node = bin(changeid)
                    self._rev = repo.changelog.rev(self._node)
                    return
                except error.FilteredLookupError:
                    raise
                except (TypeError, LookupError):
                    pass

            # lookup bookmarks through the name interface
            try:
                self._node = repo.names.singlenode(repo, changeid)
                self._rev = repo.changelog.rev(self._node)
                return
            except KeyError:
                pass
            except error.FilteredRepoLookupError:
                raise
            except error.RepoLookupError:
                pass

            self._node = repo.unfiltered().changelog._partialmatch(changeid)
            if self._node is not None:
                self._rev = repo.changelog.rev(self._node)
                return

            # lookup failed
            # check if it might have come from damaged dirstate
            #
            # XXX we could avoid the unfiltered if we had a recognizable
            # exception for filtered changeset access
            if changeid in repo.unfiltered().dirstate.parents():
                msg = _("working directory has unknown parent '%s'!")
                raise error.Abort(msg % short(changeid))
            try:
                # Hex-encode raw binary ids so the error message below
                # stays printable.
                if len(changeid) == 20 and nonascii(changeid):
                    changeid = hex(changeid)
            except TypeError:
                pass
        except (error.FilteredIndexError, error.FilteredLookupError,
                error.FilteredRepoLookupError):
            # The revision exists but is filtered from the current view;
            # give a more helpful message for the common 'hidden' case.
            if repo.filtername.startswith('visible'):
                msg = _("hidden revision '%s'") % changeid
                hint = _('use --hidden to access hidden revisions')
                raise error.FilteredRepoLookupError(msg, hint=hint)
            msg = _("filtered revision '%s' (not in '%s' subset)")
            msg %= (changeid, repo.filtername)
            raise error.FilteredRepoLookupError(msg)
        except IndexError:
            pass
        raise error.RepoLookupError(
            _("unknown revision '%s'") % changeid)
515
515
516 def __hash__(self):
516 def __hash__(self):
517 try:
517 try:
518 return hash(self._rev)
518 return hash(self._rev)
519 except AttributeError:
519 except AttributeError:
520 return id(self)
520 return id(self)
521
521
    def __nonzero__(self):
        # Only the null revision is falsy.
        return self._rev != nullrev
524
524
    @propertycache
    def _changeset(self):
        # Lazily-loaded raw changelog entry for this revision.
        return self._repo.changelog.changelogrevision(self.rev())
528
528
    @propertycache
    def _manifest(self):
        # Full manifest for this changeset, read on first access.
        return self._repo.manifest.read(self._changeset.manifest)
532
532
    @propertycache
    def _manifestdelta(self):
        # Manifest delta against the parent; cheaper than a full read when
        # only a few entries are needed.
        return self._repo.manifest.readdelta(self._changeset.manifest)
536
536
@propertycache
def _parents(self):
    """Parent changectxs; a one-element list for non-merge changesets."""
    repo = self._repo
    p1, p2 = repo.changelog.parentrevs(self._rev)
    if p2 == nullrev:
        return [changectx(repo, p1)]
    return [changectx(repo, p1), changectx(repo, p2)]
544
544
def changeset(self):
    """Return the raw changeset data as a 6-tuple:
    (manifest, user, date, files, description, extra).
    """
    c = self._changeset
    return (
        c.manifest,
        c.user,
        c.date,
        c.files,
        c.description,
        c.extra,
    )
def manifestnode(self):
    """Return the node id of this changeset's manifest."""
    return self._changeset.manifest
557
557
def user(self):
    """Committer of this changeset."""
    return self._changeset.user
def date(self):
    """Commit date as a (unixtime, offset) tuple."""
    return self._changeset.date
def files(self):
    """List of files touched by this changeset."""
    return self._changeset.files
def description(self):
    """Commit message."""
    return self._changeset.description
def branch(self):
    # branch name lives in the 'extra' dict; convert to local encoding
    return encoding.tolocal(self._changeset.extra.get("branch"))
def closesbranch(self):
    """True if this changeset closes its branch."""
    return 'close' in self._changeset.extra
def extra(self):
    """The changeset's 'extra' metadata dict."""
    return self._changeset.extra
def tags(self):
    """Tags pointing at this changeset's node."""
    return self._repo.nodetags(self._node)
def bookmarks(self):
    """Bookmarks pointing at this changeset's node."""
    return self._repo.nodebookmarks(self._node)
def phase(self):
    """Phase of this changeset (via the repo's phase cache)."""
    return self._repo._phasecache.phase(self._repo, self._rev)
def hidden(self):
    """True if this revision is filtered out of the 'visible' view."""
    return self._rev in repoview.filterrevs(self._repo, 'visible')
580
580
def children(self):
    """return contexts for each child changeset"""
    c = self._repo.changelog.children(self._node)
    return [changectx(self._repo, x) for x in c]
585
585
def ancestors(self):
    """Generate a changectx for each ancestor revision."""
    for a in self._repo.changelog.ancestors([self._rev]):
        yield changectx(self._repo, a)
589
589
def descendants(self):
    """Generate a changectx for each descendant revision."""
    for d in self._repo.changelog.descendants([self._rev]):
        yield changectx(self._repo, d)
593
593
def filectx(self, path, fileid=None, filelog=None):
    """get a file context from this changeset"""
    if fileid is None:
        fileid = self.filenode(path)
    return filectx(self._repo, path, fileid=fileid,
                   changectx=self, filelog=filelog)
600
600
def ancestor(self, c2, warn=False):
    """return the "best" ancestor context of self and c2

    If there are multiple candidates, it will show a message and check
    merge.preferancestor configuration before falling back to the
    revlog ancestor.
    """
    # deal with workingctxs
    n2 = c2._node
    if n2 is None:
        n2 = c2._parents[0]._node
    cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
    if not cahs:
        anc = nullid
    elif len(cahs) == 1:
        anc = cahs[0]
    else:
        # experimental config: merge.preferancestor
        for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']):
            try:
                ctx = changectx(self._repo, r)
            except error.RepoLookupError:
                continue
            anc = ctx.node()
            if anc in cahs:
                break
        else:
            # no configured preference matched; fall back to the revlog
            # ancestor and optionally tell the user about alternatives
            anc = self._repo.changelog.ancestor(self._node, n2)
            if warn:
                self._repo.ui.status(
                    (_("note: using %s as ancestor of %s and %s\n") %
                     (short(anc), short(self._node), short(n2))) +
                    ''.join(_(" alternatively, use --config "
                              "merge.preferancestor=%s\n") %
                            short(n) for n in sorted(cahs) if n != anc))
    return changectx(self._repo, anc)
636
636
def descendant(self, other):
    """True if other is descendant of this changeset"""
    return self._repo.changelog.descendant(self._rev, other._rev)
640
640
def walk(self, match):
    '''Generates matching file names.'''

    # Wrap match.bad method to have message with nodeid
    def bad(fn, msg):
        # The manifest doesn't know about subrepos, so don't complain about
        # paths into valid subrepos.
        if any(fn == s or fn.startswith(s + '/')
               for s in self.substate):
            return
        match.bad(fn, _('no such file in rev %s') % self)

    m = matchmod.badmatch(match, bad)
    return self._manifest.walk(m)
655
655
def matches(self, match):
    """Alias for walk(): generate the file names matching 'match'."""
    return self.walk(match)
658
658
class basefilectx(object):
    """A filecontext object represents the common logic for its children:
    filectx: read-only access to a filerevision that is already present
             in the repo,
    workingfilectx: a filecontext that represents files from the working
                    directory,
    memfilectx: a filecontext that represents files in-memory."""
    def __new__(cls, repo, path, *args, **kwargs):
        return super(basefilectx, cls).__new__(cls)

    @propertycache
    def _filelog(self):
        # filelog for this path, loaded on first access
        return self._repo.file(self._path)

    @propertycache
    def _changeid(self):
        if '_changeid' in self.__dict__:
            return self._changeid
        elif '_changectx' in self.__dict__:
            return self._changectx.rev()
        elif '_descendantrev' in self.__dict__:
            # this file context was created from a revision with a known
            # descendant, we can (lazily) correct for linkrev aliases
            return self._adjustlinkrev(self._path, self._filelog,
                                       self._filenode, self._descendantrev)
        else:
            return self._filelog.linkrev(self._filerev)

    @propertycache
    def _filenode(self):
        if '_fileid' in self.__dict__:
            return self._filelog.lookup(self._fileid)
        else:
            return self._changectx.filenode(self._path)

    @propertycache
    def _filerev(self):
        return self._filelog.rev(self._filenode)

    @propertycache
    def _repopath(self):
        return self._path

    def __nonzero__(self):
        try:
            self._filenode
            return True
        except error.LookupError:
            # file is missing
            return False

    def __str__(self):
        return "%s@%s" % (self.path(), self._changectx)

    def __repr__(self):
        return "<%s %s>" % (type(self).__name__, str(self))

    def __hash__(self):
        try:
            return hash((self._path, self._filenode))
        except AttributeError:
            return id(self)

    def __eq__(self, other):
        try:
            return (type(self) == type(other) and self._path == other._path
                    and self._filenode == other._filenode)
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def filerev(self):
        return self._filerev
    def filenode(self):
        return self._filenode
    def flags(self):
        return self._changectx.flags(self._path)
    def filelog(self):
        return self._filelog
    def rev(self):
        return self._changeid
    def linkrev(self):
        return self._filelog.linkrev(self._filerev)
    def node(self):
        return self._changectx.node()
    def hex(self):
        return self._changectx.hex()
    def user(self):
        return self._changectx.user()
    def date(self):
        return self._changectx.date()
    def files(self):
        return self._changectx.files()
    def description(self):
        return self._changectx.description()
    def branch(self):
        return self._changectx.branch()
    def extra(self):
        return self._changectx.extra()
    def phase(self):
        return self._changectx.phase()
    def phasestr(self):
        return self._changectx.phasestr()
    def manifest(self):
        return self._changectx.manifest()
    def changectx(self):
        return self._changectx
    def repo(self):
        return self._repo

    def path(self):
        return self._path

    def isbinary(self):
        try:
            return util.binary(self.data())
        except IOError:
            return False
    def isexec(self):
        return 'x' in self.flags()
    def islink(self):
        return 'l' in self.flags()

    def isabsent(self):
        """whether this filectx represents a file not in self._changectx

        This is mainly for merge code to detect change/delete conflicts. This is
        expected to be True for all subclasses of basectx."""
        return False

    # when True on the *other* context, cmp() delegates the comparison to it
    _customcmp = False
    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        if fctx._customcmp:
            return fctx.cmp(self)

        if (fctx._filenode is None
            and (self._repo._encodefilterpats
                 # if file data starts with '\1\n', empty metadata block is
                 # prepended, which adds 4 bytes to filelog.size().
                 or self.size() - 4 == fctx.size())
            or self.size() == fctx.size()):
            return self._filelog.cmp(self._filenode, fctx.data())

        return True

    def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False):
        """return the first ancestor of <srcrev> introducing <fnode>

        If the linkrev of the file revision does not point to an ancestor of
        srcrev, we'll walk down the ancestors until we find one introducing
        this file revision.

        :repo: a localrepository object (used to access changelog and manifest)
        :path: the file path
        :fnode: the nodeid of the file revision
        :filelog: the filelog of this path
        :srcrev: the changeset revision we search ancestors from
        :inclusive: if true, the src revision will also be checked
        """
        repo = self._repo
        cl = repo.unfiltered().changelog
        ma = repo.manifest
        # fetch the linkrev
        fr = filelog.rev(fnode)
        lkr = filelog.linkrev(fr)
        # hack to reuse ancestor computation when searching for renames
        memberanc = getattr(self, '_ancestrycontext', None)
        iteranc = None
        if srcrev is None:
            # wctx case, used by workingfilectx during mergecopy
            revs = [p.rev() for p in self._repo[None].parents()]
            inclusive = True # we skipped the real (revless) source
        else:
            revs = [srcrev]
        if memberanc is None:
            memberanc = iteranc = cl.ancestors(revs, lkr,
                                               inclusive=inclusive)
        # check if this linkrev is an ancestor of srcrev
        if lkr not in memberanc:
            if iteranc is None:
                iteranc = cl.ancestors(revs, lkr, inclusive=inclusive)
            for a in iteranc:
                ac = cl.read(a) # get changeset data (we avoid object creation)
                if path in ac[3]: # checking the 'files' field.
                    # The file has been touched, check if the content is
                    # similar to the one we search for.
                    if fnode == ma.readfast(ac[0]).get(path):
                        return a
            # In theory, we should never get out of that loop without a result.
            # But if manifest uses a buggy file revision (not children of the
            # one it replaces) we could. Such a buggy situation will likely
            # result is crash somewhere else at to some point.
        return lkr

    def introrev(self):
        """return the rev of the changeset which introduced this file revision

        This method is different from linkrev because it take into account the
        changeset the filectx was created from. It ensures the returned
        revision is one of its ancestors. This prevents bugs from
        'linkrev-shadowing' when a file revision is used by multiple
        changesets.
        """
        lkr = self.linkrev()
        attrs = vars(self)
        noctx = not ('_changeid' in attrs or '_changectx' in attrs)
        if noctx or self.rev() == lkr:
            return self.linkrev()
        return self._adjustlinkrev(self._path, self._filelog, self._filenode,
                                   self.rev(), inclusive=True)

    def _parentfilectx(self, path, fileid, filelog):
        """create parent filectx keeping ancestry info for _adjustlinkrev()"""
        fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog)
        if '_changeid' in vars(self) or '_changectx' in vars(self):
            # If self is associated with a changeset (probably explicitly
            # fed), ensure the created filectx is associated with a
            # changeset that is an ancestor of self.changectx.
            # This lets us later use _adjustlinkrev to get a correct link.
            fctx._descendantrev = self.rev()
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        elif '_descendantrev' in vars(self):
            # Otherwise propagate _descendantrev if we have one associated.
            fctx._descendantrev = self._descendantrev
            fctx._ancestrycontext = getattr(self, '_ancestrycontext', None)
        return fctx

    def parents(self):
        _path = self._path
        fl = self._filelog
        parents = self._filelog.parents(self._filenode)
        pl = [(_path, node, fl) for node in parents if node != nullid]

        r = fl.renamed(self._filenode)
        if r:
            # - In the simple rename case, both parent are nullid, pl is empty.
            # - In case of merge, only one of the parent is null id and should
            # be replaced with the rename information. This parent is -always-
            # the first one.
            #
            # As null id have always been filtered out in the previous list
            # comprehension, inserting to 0 will always result in "replacing
            # first nullid parent with rename information.
            pl.insert(0, (r[0], r[1], self._repo.file(r[0])))

        return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl]

    def p1(self):
        return self.parents()[0]

    def p2(self):
        p = self.parents()
        if len(p) == 2:
            return p[1]
        return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog)

    def annotate(self, follow=False, linenumber=None, diffopts=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.
        This returns tuples of ((ctx, linenumber), line) for each line,
        if "linenumber" parameter is NOT "None".
        In such tuples, linenumber means one at the first appearance
        in the managed file.
        To reduce annotation cost,
        this returns fixed value(False is used) as linenumber,
        if "linenumber" parameter is "False".'''

        if linenumber is None:
            def decorate(text, rev):
                return ([rev] * len(text.splitlines()), text)
        elif linenumber:
            def decorate(text, rev):
                size = len(text.splitlines())
                return ([(rev, i) for i in xrange(1, size + 1)], text)
        else:
            def decorate(text, rev):
                return ([(rev, False)] * len(text.splitlines()), text)

        def pair(parent, child):
            blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
                                     refine=True)
            for (a1, a2, b1, b2), t in blocks:
                # Changed blocks ('!') or blocks made only of blank lines ('~')
                # belong to the child.
                if t == '=':
                    child[0][b1:b2] = parent[0][a1:a2]
            return child

        getlog = util.lrucachefunc(lambda x: self._repo.file(x))

        def parents(f):
            # Cut _descendantrev here to mitigate the penalty of lazy linkrev
            # adjustment. Otherwise, p._adjustlinkrev() would walk changelog
            # from the topmost introrev (= srcrev) down to p.linkrev() if it
            # isn't an ancestor of the srcrev.
            f._changeid
            pl = f.parents()

            # Don't return renamed parents if we aren't following.
            if not follow:
                pl = [p for p in pl if p.path() == f.path()]

            # renamed filectx won't have a filelog yet, so set it
            # from the cache to save time
            for p in pl:
                if not '_filelog' in p.__dict__:
                    p._filelog = getlog(p.path())

            return pl

        # use linkrev to find the first changeset where self appeared
        base = self
        introrev = self.introrev()
        if self.rev() != introrev:
            base = self.filectx(self.filenode(), changeid=introrev)
        if getattr(base, '_ancestrycontext', None) is None:
            cl = self._repo.changelog
            if introrev is None:
                # wctx is not inclusive, but works because _ancestrycontext
                # is used to test filelog revisions
                ac = cl.ancestors([p.rev() for p in base.parents()],
                                  inclusive=True)
            else:
                ac = cl.ancestors([introrev], inclusive=True)
            base._ancestrycontext = ac

        # This algorithm would prefer to be recursive, but Python is a
        # bit recursion-hostile. Instead we do an iterative
        # depth-first search.

        visit = [base]
        hist = {}
        pcache = {}
        needed = {base: 1}
        while visit:
            f = visit[-1]
            pcached = f in pcache
            if not pcached:
                pcache[f] = parents(f)

            ready = True
            pl = pcache[f]
            for p in pl:
                if p not in hist:
                    ready = False
                    visit.append(p)
                if not pcached:
                    needed[p] = needed.get(p, 0) + 1
            if ready:
                visit.pop()
                reusable = f in hist
                if reusable:
                    curr = hist[f]
                else:
                    curr = decorate(f.data(), f)
                for p in pl:
                    if not reusable:
                        curr = pair(hist[p], curr)
                    if needed[p] == 1:
                        del hist[p]
                        del needed[p]
                    else:
                        needed[p] -= 1

                hist[f] = curr
                pcache[f] = []

        return zip(hist[base][0], hist[base][1].splitlines(True))

    def ancestors(self, followfirst=False):
        # Yield ancestor file contexts, most recent linkrev first.
        visit = {}
        c = self
        if followfirst:
            cut = 1
        else:
            cut = None

        while True:
            for parent in c.parents()[:cut]:
                visit[(parent.linkrev(), parent.filenode())] = parent
            if not visit:
                break
            c = visit.pop(max(visit))
            yield c
1050
1050
class filectx(basefilectx):
    """A filecontext object makes access to data related to a particular
    filerevision convenient."""
    def __init__(self, repo, path, changeid=None, fileid=None,
                 filelog=None, changectx=None):
        """changeid can be a changeset revision, node, or tag.
        fileid can be a file revision or node."""
        self._repo = repo
        self._path = path

        # At least one of the three revision handles must identify which
        # file revision this context refers to.
        assert (changeid is not None
                or fileid is not None
                or changectx is not None), \
                ("bad args: changeid=%r, fileid=%r, changectx=%r"
                 % (changeid, fileid, changectx))

        if filelog is not None:
            self._filelog = filelog

        if changeid is not None:
            self._changeid = changeid
        if changectx is not None:
            self._changectx = changectx
        if fileid is not None:
            self._fileid = fileid

    @propertycache
    def _changectx(self):
        try:
            return changectx(self._repo, self._changeid)
        except error.FilteredRepoLookupError:
            # Linkrev may point to any revision in the repository. When the
            # repository is filtered this may lead to `filectx` trying to
            # build `changectx` for a filtered revision. In such a case we
            # fall back to creating `changectx` on the unfiltered version of
            # the repository. This fallback should not be an issue because
            # `changectx` from `filectx` are not used in complex operations
            # that care about filtering.
            #
            # This fallback is a cheap and dirty fix that prevents several
            # crashes. It does not ensure the behavior is correct. However
            # the behavior was not correct before filtering either, and
            # "incorrect behavior" is seen as better than "crash".
            #
            # Linkrevs have several serious troubles with filtering that are
            # complicated to solve. Proper handling of the issue here should
            # be considered when solving the linkrev issues.
            return changectx(self._repo.unfiltered(), self._changeid)

    def filectx(self, fileid, changeid=None):
        '''opens an arbitrary revision of the file without
        opening a new filelog'''
        return filectx(self._repo, self._path, fileid=fileid,
                       filelog=self._filelog, changeid=changeid)

    def data(self):
        """Return the file content, honoring the censor policy on
        censored nodes (empty string if policy is "ignore", abort
        otherwise)."""
        try:
            return self._filelog.read(self._filenode)
        except error.CensoredNodeError:
            if self._repo.ui.config("censor", "policy", "abort") == "ignore":
                return ""
            raise error.Abort(_("censored node: %s") % short(self._filenode),
                              hint=_("set censor.policy to ignore errors"))

    def size(self):
        # Size as recorded in the filelog for this file revision.
        return self._filelog.size(self._filerev)

    def renamed(self):
        """check if file was actually renamed in this changeset revision

        If rename logged in file revision, we report copy for changeset only
        if file revisions linkrev points back to the changeset in question
        or both changeset parents contain different file revisions.
        """

        renamed = self._filelog.renamed(self._filenode)
        if not renamed:
            return renamed

        if self.rev() == self.linkrev():
            return renamed

        name = self.path()
        fnode = self._filenode
        for p in self._changectx.parents():
            try:
                if fnode == p.filenode(name):
                    return None
            except error.LookupError:
                # file does not exist in this parent; keep looking
                pass
        return renamed

    def children(self):
        # hard for renames
        c = self._filelog.children(self._filenode)
        return [filectx(self._repo, self._path, fileid=x,
                        filelog=self._filelog) for x in c]
1148
1148
class committablectx(basectx):
    """A committablectx object provides common functionality for a context that
    wants the ability to commit, e.g. workingctx or memctx."""
    def __init__(self, repo, text="", user=None, date=None, extra=None,
                 changes=None):
        self._repo = repo
        self._rev = None
        self._node = None
        self._text = text
        # Falsy values fall through to the lazily computed propertycaches
        # (_date, _user, _status) below.
        if date:
            self._date = util.parsedate(date)
        if user:
            self._user = user
        if changes:
            self._status = changes

        self._extra = {}
        if extra:
            self._extra = extra.copy()
        if 'branch' not in self._extra:
            try:
                branch = encoding.fromlocal(self._repo.dirstate.branch())
            except UnicodeDecodeError:
                raise error.Abort(_('branch name not in UTF-8!'))
            self._extra['branch'] = branch
            if self._extra['branch'] == '':
                self._extra['branch'] = 'default'

    def __str__(self):
        return str(self._parents[0]) + "+"

    def __nonzero__(self):
        return True

    def _buildflagfunc(self):
        # Create a fallback function for getting file flags when the
        # filesystem doesn't support them

        copiesget = self._repo.dirstate.copies().get
        parents = self.parents()
        if len(parents) < 2:
            # when we have one parent, it's easy: copy from parent
            man = parents[0].manifest()
            def func(f):
                f = copiesget(f, f)
                return man.flags(f)
        else:
            # merges are tricky: we try to reconstruct the unstored
            # result from the merge (issue1802)
            p1, p2 = parents
            pa = p1.ancestor(p2)
            m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()

            def func(f):
                f = copiesget(f, f) # may be wrong for merges with copies
                fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
                if fl1 == fl2:
                    return fl1
                if fl1 == fla:
                    return fl2
                if fl2 == fla:
                    return fl1
                return '' # punt for conflicts

        return func

    @propertycache
    def _flagfunc(self):
        return self._repo.dirstate.flagfunc(self._buildflagfunc)

    @propertycache
    def _manifest(self):
        """generate a manifest corresponding to the values in self._status

        This reuse the file nodeid from parent, but we append an extra letter
        when modified. Modified files get an extra 'm' while added files get
        an extra 'a'. This is used by manifests merge to see that files
        are different and by update logic to avoid deleting newly added files.
        """
        parents = self.parents()

        man1 = parents[0].manifest()
        man = man1.copy()
        if len(parents) > 1:
            man2 = self.p2().manifest()
            def getman(f):
                if f in man1:
                    return man1
                return man2
        else:
            getman = lambda f: man1

        copied = self._repo.dirstate.copies()
        ff = self._flagfunc
        for i, l in (("a", self._status.added), ("m", self._status.modified)):
            for f in l:
                orig = copied.get(f, f)
                man[f] = getman(orig).get(orig, nullid) + i
                try:
                    man.setflag(f, ff(f))
                except OSError:
                    pass

        for f in self._status.deleted + self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        return self._repo.status()

    @propertycache
    def _user(self):
        return self._repo.ui.username()

    @propertycache
    def _date(self):
        return util.makedate()

    def subrev(self, subpath):
        # uncommitted contexts have no recorded subrepo revision
        return None

    def manifestnode(self):
        return None
    def user(self):
        return self._user or self._repo.ui.username()
    def date(self):
        return self._date
    def description(self):
        return self._text
    def files(self):
        return sorted(self._status.modified + self._status.added +
                      self._status.removed)

    def modified(self):
        return self._status.modified
    def added(self):
        return self._status.added
    def removed(self):
        return self._status.removed
    def deleted(self):
        return self._status.deleted
    def branch(self):
        return encoding.tolocal(self._extra['branch'])
    def closesbranch(self):
        return 'close' in self._extra
    def extra(self):
        return self._extra

    def tags(self):
        return []

    def bookmarks(self):
        # an uncommitted context carries its parents' bookmarks
        b = []
        for p in self.parents():
            b.extend(p.bookmarks())
        return b

    def phase(self):
        phase = phases.draft # default phase to draft
        for p in self.parents():
            phase = max(phase, p.phase())
        return phase

    def hidden(self):
        return False

    def children(self):
        return []

    def flags(self, path):
        if '_manifest' in self.__dict__:
            # manifest already built: read flags from it
            try:
                return self._manifest.flags(path)
            except KeyError:
                return ''

        try:
            return self._flagfunc(path)
        except OSError:
            return ''

    def ancestor(self, c2):
        """return the "best" ancestor context of self and c2"""
        return self._parents[0].ancestor(c2) # punt on two parents for now

    def walk(self, match):
        '''Generates matching file names.'''
        return sorted(self._repo.dirstate.walk(match, sorted(self.substate),
                                               True, False))

    def matches(self, match):
        return sorted(self._repo.dirstate.matches(match))

    def ancestors(self):
        # yield direct parents first, then all changelog ancestors
        for p in self._parents:
            yield p
        for a in self._repo.changelog.ancestors(
            [p.rev() for p in self._parents]):
            yield changectx(self._repo, a)

    def markcommitted(self, node):
        """Perform post-commit cleanup necessary after committing this ctx

        Specifically, this updates backing stores this working context
        wraps to reflect the fact that the changes reflected by this
        workingctx have been committed. For example, it marks
        modified and added files as normal in the dirstate.

        """

        self._repo.dirstate.beginparentchange()
        for f in self.modified() + self.added():
            self._repo.dirstate.normal(f)
        for f in self.removed():
            self._repo.dirstate.drop(f)
        self._repo.dirstate.setparents(node)
        self._repo.dirstate.endparentchange()

        # write changes out explicitly, because nesting wlock at
        # runtime may prevent 'wlock.release()' in 'repo.commit()'
        # from immediately doing so for subsequent changing files
        self._repo.dirstate.write(self._repo.currenttransaction())
1374
1374
1375 class workingctx(committablectx):
1375 class workingctx(committablectx):
1376 """A workingctx object makes access to data related to
1376 """A workingctx object makes access to data related to
1377 the current working directory convenient.
1377 the current working directory convenient.
1378 date - any valid date string or (unixtime, offset), or None.
1378 date - any valid date string or (unixtime, offset), or None.
1379 user - username string, or None.
1379 user - username string, or None.
1380 extra - a dictionary of extra values, or None.
1380 extra - a dictionary of extra values, or None.
1381 changes - a list of file lists as returned by localrepo.status()
1381 changes - a list of file lists as returned by localrepo.status()
1382 or None to use the repository status.
1382 or None to use the repository status.
1383 """
1383 """
1384 def __init__(self, repo, text="", user=None, date=None, extra=None,
1384 def __init__(self, repo, text="", user=None, date=None, extra=None,
1385 changes=None):
1385 changes=None):
1386 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1386 super(workingctx, self).__init__(repo, text, user, date, extra, changes)
1387
1387
1388 def __iter__(self):
1388 def __iter__(self):
1389 d = self._repo.dirstate
1389 d = self._repo.dirstate
1390 for f in d:
1390 for f in d:
1391 if d[f] != 'r':
1391 if d[f] != 'r':
1392 yield f
1392 yield f
1393
1393
1394 def __contains__(self, key):
1394 def __contains__(self, key):
1395 return self._repo.dirstate[key] not in "?r"
1395 return self._repo.dirstate[key] not in "?r"
1396
1396
1397 def hex(self):
1397 def hex(self):
1398 return hex(wdirid)
1398 return hex(wdirid)
1399
1399
1400 @propertycache
1400 @propertycache
1401 def _parents(self):
1401 def _parents(self):
1402 p = self._repo.dirstate.parents()
1402 p = self._repo.dirstate.parents()
1403 if p[1] == nullid:
1403 if p[1] == nullid:
1404 p = p[:-1]
1404 p = p[:-1]
1405 return [changectx(self._repo, x) for x in p]
1405 return [changectx(self._repo, x) for x in p]
1406
1406
1407 def filectx(self, path, filelog=None):
1407 def filectx(self, path, filelog=None):
1408 """get a file context from the working directory"""
1408 """get a file context from the working directory"""
1409 return workingfilectx(self._repo, path, workingctx=self,
1409 return workingfilectx(self._repo, path, workingctx=self,
1410 filelog=filelog)
1410 filelog=filelog)
1411
1411
1412 def dirty(self, missing=False, merge=True, branch=True):
1412 def dirty(self, missing=False, merge=True, branch=True):
1413 "check whether a working directory is modified"
1413 "check whether a working directory is modified"
1414 # check subrepos first
1414 # check subrepos first
1415 for s in sorted(self.substate):
1415 for s in sorted(self.substate):
1416 if self.sub(s).dirty():
1416 if self.sub(s).dirty():
1417 return True
1417 return True
1418 # check current working dir
1418 # check current working dir
1419 return ((merge and self.p2()) or
1419 return ((merge and self.p2()) or
1420 (branch and self.branch() != self.p1().branch()) or
1420 (branch and self.branch() != self.p1().branch()) or
1421 self.modified() or self.added() or self.removed() or
1421 self.modified() or self.added() or self.removed() or
1422 (missing and self.deleted()))
1422 (missing and self.deleted()))
1423
1423
1424 def add(self, list, prefix=""):
1424 def add(self, list, prefix=""):
1425 join = lambda f: os.path.join(prefix, f)
1425 join = lambda f: os.path.join(prefix, f)
1426 with self._repo.wlock():
1426 with self._repo.wlock():
1427 ui, ds = self._repo.ui, self._repo.dirstate
1427 ui, ds = self._repo.ui, self._repo.dirstate
1428 rejected = []
1428 rejected = []
1429 lstat = self._repo.wvfs.lstat
1429 lstat = self._repo.wvfs.lstat
1430 for f in list:
1430 for f in list:
1431 scmutil.checkportable(ui, join(f))
1431 scmutil.checkportable(ui, join(f))
1432 try:
1432 try:
1433 st = lstat(f)
1433 st = lstat(f)
1434 except OSError:
1434 except OSError:
1435 ui.warn(_("%s does not exist!\n") % join(f))
1435 ui.warn(_("%s does not exist!\n") % join(f))
1436 rejected.append(f)
1436 rejected.append(f)
1437 continue
1437 continue
1438 if st.st_size > 10000000:
1438 if st.st_size > 10000000:
1439 ui.warn(_("%s: up to %d MB of RAM may be required "
1439 ui.warn(_("%s: up to %d MB of RAM may be required "
1440 "to manage this file\n"
1440 "to manage this file\n"
1441 "(use 'hg revert %s' to cancel the "
1441 "(use 'hg revert %s' to cancel the "
1442 "pending addition)\n")
1442 "pending addition)\n")
1443 % (f, 3 * st.st_size // 1000000, join(f)))
1443 % (f, 3 * st.st_size // 1000000, join(f)))
1444 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1444 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1445 ui.warn(_("%s not added: only files and symlinks "
1445 ui.warn(_("%s not added: only files and symlinks "
1446 "supported currently\n") % join(f))
1446 "supported currently\n") % join(f))
1447 rejected.append(f)
1447 rejected.append(f)
1448 elif ds[f] in 'amn':
1448 elif ds[f] in 'amn':
1449 ui.warn(_("%s already tracked!\n") % join(f))
1449 ui.warn(_("%s already tracked!\n") % join(f))
1450 elif ds[f] == 'r':
1450 elif ds[f] == 'r':
1451 ds.normallookup(f)
1451 ds.normallookup(f)
1452 else:
1452 else:
1453 ds.add(f)
1453 ds.add(f)
1454 return rejected
1454 return rejected
1455
1455
1456 def forget(self, files, prefix=""):
1456 def forget(self, files, prefix=""):
1457 join = lambda f: os.path.join(prefix, f)
1457 join = lambda f: os.path.join(prefix, f)
1458 with self._repo.wlock():
1458 with self._repo.wlock():
1459 rejected = []
1459 rejected = []
1460 for f in files:
1460 for f in files:
1461 if f not in self._repo.dirstate:
1461 if f not in self._repo.dirstate:
1462 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1462 self._repo.ui.warn(_("%s not tracked!\n") % join(f))
1463 rejected.append(f)
1463 rejected.append(f)
1464 elif self._repo.dirstate[f] != 'a':
1464 elif self._repo.dirstate[f] != 'a':
1465 self._repo.dirstate.remove(f)
1465 self._repo.dirstate.remove(f)
1466 else:
1466 else:
1467 self._repo.dirstate.drop(f)
1467 self._repo.dirstate.drop(f)
1468 return rejected
1468 return rejected
1469
1469
1470 def undelete(self, list):
1470 def undelete(self, list):
1471 pctxs = self.parents()
1471 pctxs = self.parents()
1472 with self._repo.wlock():
1472 with self._repo.wlock():
1473 for f in list:
1473 for f in list:
1474 if self._repo.dirstate[f] != 'r':
1474 if self._repo.dirstate[f] != 'r':
1475 self._repo.ui.warn(_("%s not removed!\n") % f)
1475 self._repo.ui.warn(_("%s not removed!\n") % f)
1476 else:
1476 else:
1477 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1477 fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f]
1478 t = fctx.data()
1478 t = fctx.data()
1479 self._repo.wwrite(f, t, fctx.flags())
1479 self._repo.wwrite(f, t, fctx.flags())
1480 self._repo.dirstate.normal(f)
1480 self._repo.dirstate.normal(f)
1481
1481
1482 def copy(self, source, dest):
1482 def copy(self, source, dest):
1483 try:
1483 try:
1484 st = self._repo.wvfs.lstat(dest)
1484 st = self._repo.wvfs.lstat(dest)
1485 except OSError as err:
1485 except OSError as err:
1486 if err.errno != errno.ENOENT:
1486 if err.errno != errno.ENOENT:
1487 raise
1487 raise
1488 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1488 self._repo.ui.warn(_("%s does not exist!\n") % dest)
1489 return
1489 return
1490 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1490 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1491 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1491 self._repo.ui.warn(_("copy failed: %s is not a file or a "
1492 "symbolic link\n") % dest)
1492 "symbolic link\n") % dest)
1493 else:
1493 else:
1494 with self._repo.wlock():
1494 with self._repo.wlock():
1495 if self._repo.dirstate[dest] in '?':
1495 if self._repo.dirstate[dest] in '?':
1496 self._repo.dirstate.add(dest)
1496 self._repo.dirstate.add(dest)
1497 elif self._repo.dirstate[dest] in 'r':
1497 elif self._repo.dirstate[dest] in 'r':
1498 self._repo.dirstate.normallookup(dest)
1498 self._repo.dirstate.normallookup(dest)
1499 self._repo.dirstate.copy(source, dest)
1499 self._repo.dirstate.copy(source, dest)
1500
1500
1501 def match(self, pats=[], include=None, exclude=None, default='glob',
1501 def match(self, pats=[], include=None, exclude=None, default='glob',
1502 listsubrepos=False, badfn=None):
1502 listsubrepos=False, badfn=None):
1503 r = self._repo
1503 r = self._repo
1504
1504
1505 # Only a case insensitive filesystem needs magic to translate user input
1505 # Only a case insensitive filesystem needs magic to translate user input
1506 # to actual case in the filesystem.
1506 # to actual case in the filesystem.
1507 if not util.checkcase(r.root):
1507 if not util.checkcase(r.root):
1508 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1508 return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include,
1509 exclude, default, r.auditor, self,
1509 exclude, default, r.auditor, self,
1510 listsubrepos=listsubrepos,
1510 listsubrepos=listsubrepos,
1511 badfn=badfn)
1511 badfn=badfn)
1512 return matchmod.match(r.root, r.getcwd(), pats,
1512 return matchmod.match(r.root, r.getcwd(), pats,
1513 include, exclude, default,
1513 include, exclude, default,
1514 auditor=r.auditor, ctx=self,
1514 auditor=r.auditor, ctx=self,
1515 listsubrepos=listsubrepos, badfn=badfn)
1515 listsubrepos=listsubrepos, badfn=badfn)
1516
1516
1517 def _filtersuspectsymlink(self, files):
1517 def _filtersuspectsymlink(self, files):
1518 if not files or self._repo.dirstate._checklink:
1518 if not files or self._repo.dirstate._checklink:
1519 return files
1519 return files
1520
1520
1521 # Symlink placeholders may get non-symlink-like contents
1521 # Symlink placeholders may get non-symlink-like contents
1522 # via user error or dereferencing by NFS or Samba servers,
1522 # via user error or dereferencing by NFS or Samba servers,
1523 # so we filter out any placeholders that don't look like a
1523 # so we filter out any placeholders that don't look like a
1524 # symlink
1524 # symlink
1525 sane = []
1525 sane = []
1526 for f in files:
1526 for f in files:
1527 if self.flags(f) == 'l':
1527 if self.flags(f) == 'l':
1528 d = self[f].data()
1528 d = self[f].data()
1529 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1529 if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d):
1530 self._repo.ui.debug('ignoring suspect symlink placeholder'
1530 self._repo.ui.debug('ignoring suspect symlink placeholder'
1531 ' "%s"\n' % f)
1531 ' "%s"\n' % f)
1532 continue
1532 continue
1533 sane.append(f)
1533 sane.append(f)
1534 return sane
1534 return sane
1535
1535
    def _checklookup(self, files):
        """Re-examine files the dirstate flagged as possibly clean.

        ``files`` are paths whose state could not be decided from the
        dirstate metadata alone.  Returns a pair ``(modified, fixup)``:
        ``modified`` are files whose flags or content really differ from
        the first parent; ``fixup`` are files that turned out to be clean
        (their dirstate entries are opportunistically marked normal).
        """
        # check for any possibly clean files
        if not files:
            return [], []

        modified = []
        fixup = []
        pctx = self._parents[0]
        # do a full compare of any files that might have changed
        for f in sorted(files):
            if (f not in pctx or self.flags(f) != pctx.flags(f)
                or pctx[f].cmp(self[f])):
                modified.append(f)
            else:
                fixup.append(f)

        # update dirstate for files that are actually clean
        if fixup:
            try:
                # updating the dirstate is optional
                # so we don't wait on the lock
                # wlock can invalidate the dirstate, so cache normal _after_
                # taking the lock
                with self._repo.wlock(False):
                    normal = self._repo.dirstate.normal
                    for f in fixup:
                        normal(f)
                    # write changes out explicitly, because nesting
                    # wlock at runtime may prevent 'wlock.release()'
                    # after this block from doing so for subsequent
                    # changing files
                    self._repo.dirstate.write(self._repo.currenttransaction())
            except error.LockError:
                # best effort: the fixup is purely an optimization, so it
                # is silently skipped when the lock cannot be acquired
                pass
        return modified, fixup
1571
1571
1572 def _manifestmatches(self, match, s):
1572 def _manifestmatches(self, match, s):
1573 """Slow path for workingctx
1573 """Slow path for workingctx
1574
1574
1575 The fast path is when we compare the working directory to its parent
1575 The fast path is when we compare the working directory to its parent
1576 which means this function is comparing with a non-parent; therefore we
1576 which means this function is comparing with a non-parent; therefore we
1577 need to build a manifest and return what matches.
1577 need to build a manifest and return what matches.
1578 """
1578 """
1579 mf = self._repo['.']._manifestmatches(match, s)
1579 mf = self._repo['.']._manifestmatches(match, s)
1580 for f in s.modified + s.added:
1580 for f in s.modified + s.added:
1581 mf[f] = _newnode
1581 mf[f] = _newnode
1582 mf.setflag(f, self.flags(f))
1582 mf.setflag(f, self.flags(f))
1583 for f in s.removed:
1583 for f in s.removed:
1584 if f in mf:
1584 if f in mf:
1585 del mf[f]
1585 del mf[f]
1586 return mf
1586 return mf
1587
1587
    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        '''Gets the status from the dirstate -- internal use only.'''
        listignored, listclean, listunknown = ignored, clean, unknown
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        subrepos = []
        if '.hgsub' in self:
            subrepos = sorted(self.substate)
        # 'cmp' lists files the dirstate could not classify without a
        # content comparison; 's' is the preliminary status
        cmp, s = self._repo.dirstate.status(match, subrepos, listignored,
                                            listclean, listunknown)

        # check for any possibly clean files
        if cmp:
            modified2, fixup = self._checklookup(cmp)
            s.modified.extend(modified2)

            # update dirstate for files that are actually clean
            if fixup and listclean:
                s.clean.extend(fixup)

        if match.always():
            # cache for performance
            if s.unknown or s.ignored or s.clean:
                # "_status" is cached with list*=False in the normal route
                self._status = scmutil.status(s.modified, s.added, s.removed,
                                              s.deleted, [], [], [])
            else:
                self._status = s

        return s
1618
1618
1619 def _buildstatus(self, other, s, match, listignored, listclean,
1619 def _buildstatus(self, other, s, match, listignored, listclean,
1620 listunknown):
1620 listunknown):
1621 """build a status with respect to another context
1621 """build a status with respect to another context
1622
1622
1623 This includes logic for maintaining the fast path of status when
1623 This includes logic for maintaining the fast path of status when
1624 comparing the working directory against its parent, which is to skip
1624 comparing the working directory against its parent, which is to skip
1625 building a new manifest if self (working directory) is not comparing
1625 building a new manifest if self (working directory) is not comparing
1626 against its parent (repo['.']).
1626 against its parent (repo['.']).
1627 """
1627 """
1628 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1628 s = self._dirstatestatus(match, listignored, listclean, listunknown)
1629 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1629 # Filter out symlinks that, in the case of FAT32 and NTFS filesystems,
1630 # might have accidentally ended up with the entire contents of the file
1630 # might have accidentally ended up with the entire contents of the file
1631 # they are supposed to be linking to.
1631 # they are supposed to be linking to.
1632 s.modified[:] = self._filtersuspectsymlink(s.modified)
1632 s.modified[:] = self._filtersuspectsymlink(s.modified)
1633 if other != self._repo['.']:
1633 if other != self._repo['.']:
1634 s = super(workingctx, self)._buildstatus(other, s, match,
1634 s = super(workingctx, self)._buildstatus(other, s, match,
1635 listignored, listclean,
1635 listignored, listclean,
1636 listunknown)
1636 listunknown)
1637 return s
1637 return s
1638
1638
1639 def _matchstatus(self, other, match):
1639 def _matchstatus(self, other, match):
1640 """override the match method with a filter for directory patterns
1640 """override the match method with a filter for directory patterns
1641
1641
1642 We use inheritance to customize the match.bad method only in cases of
1642 We use inheritance to customize the match.bad method only in cases of
1643 workingctx since it belongs only to the working directory when
1643 workingctx since it belongs only to the working directory when
1644 comparing against the parent changeset.
1644 comparing against the parent changeset.
1645
1645
1646 If we aren't comparing against the working directory's parent, then we
1646 If we aren't comparing against the working directory's parent, then we
1647 just use the default match object sent to us.
1647 just use the default match object sent to us.
1648 """
1648 """
1649 superself = super(workingctx, self)
1649 superself = super(workingctx, self)
1650 match = superself._matchstatus(other, match)
1650 match = superself._matchstatus(other, match)
1651 if other != self._repo['.']:
1651 if other != self._repo['.']:
1652 def bad(f, msg):
1652 def bad(f, msg):
1653 # 'f' may be a directory pattern from 'match.files()',
1653 # 'f' may be a directory pattern from 'match.files()',
1654 # so 'f not in ctx1' is not enough
1654 # so 'f not in ctx1' is not enough
1655 if f not in other and not other.hasdir(f):
1655 if f not in other and not other.hasdir(f):
1656 self._repo.ui.warn('%s: %s\n' %
1656 self._repo.ui.warn('%s: %s\n' %
1657 (self._repo.dirstate.pathto(f), msg))
1657 (self._repo.dirstate.pathto(f), msg))
1658 match.bad = bad
1658 match.bad = bad
1659 return match
1659 return match
1660
1660
class committablefilectx(basefilectx):
    """A committablefilectx provides common functionality for a file context
    that wants the ability to commit, e.g. workingfilectx or memfilectx."""
    def __init__(self, repo, path, filelog=None, ctx=None):
        """Initialize from a repository and a repo-root-relative path.

        ``filelog`` and ``ctx`` are optional precomputed values; when not
        supplied, the corresponding attributes are derived lazily by the
        subclasses.
        """
        self._repo = repo
        self._path = path
        self._changeid = None
        self._filerev = self._filenode = None

        if filelog is not None:
            self._filelog = filelog
        if ctx:
            self._changectx = ctx

    def __nonzero__(self):
        # a committable file context is always truthy
        return True

    def linkrev(self):
        # linked to self._changectx no matter if file is modified or not
        return self.rev()

    def parents(self):
        '''return parent filectxs, following copies if necessary'''
        def filenode(ctx, path):
            # nullid stands for "not present in that manifest"
            return ctx._manifest.get(path, nullid)

        path = self._path
        fl = self._filelog
        pcl = self._changectx._parents
        renamed = self.renamed()

        if renamed:
            # copied/renamed file: the first parent is the copy source
            pl = [renamed + (None,)]
        else:
            pl = [(path, filenode(pcl[0], path), fl)]

        for pc in pcl[1:]:
            pl.append((path, filenode(pc, path), fl))

        # drop candidates whose node is nullid (file absent in that parent)
        return [self._parentfilectx(p, fileid=n, filelog=l)
                for p, n, l in pl if n != nullid]

    def children(self):
        # uncommitted file contexts have no committed children
        return []
1705
1705
class workingfilectx(committablefilectx):
    """A workingfilectx object makes access to data related to a particular
    file in the working directory convenient."""
    def __init__(self, repo, path, filelog=None, workingctx=None):
        super(workingfilectx, self).__init__(repo, path, filelog, workingctx)

    @propertycache
    def _changectx(self):
        # built lazily when no workingctx was passed to __init__
        return workingctx(self._repo)

    def data(self):
        """Return the file's contents as read from the working directory."""
        return self._repo.wread(self._path)
    def renamed(self):
        """Return (source path, source filenode) if this file was copied,
        otherwise None."""
        rp = self._repo.dirstate.copied(self._path)
        if not rp:
            return None
        return rp, self._changectx._parents[0]._manifest.get(rp, nullid)

    def size(self):
        # size of the on-disk working copy (lstat: does not follow symlinks)
        return self._repo.wvfs.lstat(self._path).st_size
    def date(self):
        """Return (mtime, tzoffset) for the working copy of the file.

        Falls back to the changectx's own date when the file is missing
        from the working directory (ENOENT).
        """
        t, tz = self._changectx.date()
        try:
            return (self._repo.wvfs.lstat(self._path).st_mtime, tz)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            return (t, tz)

    def cmp(self, fctx):
        """compare with other file context

        returns True if different than fctx.
        """
        # fctx should be a filectx (not a workingfilectx)
        # invert comparison to reuse the same code path
        return fctx.cmp(self)

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        util.unlinkpath(self._repo.wjoin(self._path), ignoremissing)

    def write(self, data, flags):
        """wraps repo.wwrite"""
        self._repo.wwrite(self._path, data, flags)
1751
1751
class workingcommitctx(workingctx):
    """A workingcommitctx object makes access to data related to
    the revision being committed convenient.

    This hides changes in the working directory, if they aren't
    committed in this context.
    """
    def __init__(self, repo, changes,
                 text="", user=None, date=None, extra=None):
        # note: super(workingctx, ...) deliberately skips
        # workingctx.__init__ in the MRO so that the precomputed
        # 'changes' status is passed straight to the committablectx base
        super(workingctx, self).__init__(repo, text, user, date, extra,
                                         changes)

    def _dirstatestatus(self, match=None, ignored=False, clean=False,
                        unknown=False):
        """Return matched files only in ``self._status``

        Uncommitted files appear "clean" via this context, even if
        they aren't actually so in the working directory.
        """
        match = match or matchmod.always(self._repo.root, self._repo.getcwd())
        if clean:
            # everything managed but untouched by this commit reads as clean
            clean = [f for f in self._manifest if f not in self._changedset]
        else:
            clean = []
        return scmutil.status([f for f in self._status.modified if match(f)],
                              [f for f in self._status.added if match(f)],
                              [f for f in self._status.removed if match(f)],
                              [], [], [], clean)

    @propertycache
    def _changedset(self):
        """Return the set of files changed in this context
        """
        changed = set(self._status.modified)
        changed.update(self._status.added)
        changed.update(self._status.removed)
        return changed
1789
1789
def makecachingfilectxfn(func):
    """Create a filectxfn that caches based on the path.

    We can't use util.cachefunc because it uses all arguments as the cache
    key and this creates a cycle since the arguments include the repo and
    memctx.
    """
    cache = {}

    def getfilectx(repo, memctx, path):
        # EAFP: hit the cache first, compute and store on a miss
        try:
            return cache[path]
        except KeyError:
            result = func(repo, memctx, path)
            cache[path] = result
            return result

    return getfilectx
1805
1805
class memctx(committablectx):
    """Use memctx to perform in-memory commits via localrepo.commitctx().

    Revision information is supplied at initialization time while
    related files data and is made available through a callback
    mechanism. 'repo' is the current localrepo, 'parents' is a
    sequence of two parent revisions identifiers (pass None for every
    missing parent), 'text' is the commit message and 'files' lists
    names of files touched by the revision (normalized and relative to
    repository root).

    filectxfn(repo, memctx, path) is a callable receiving the
    repository, the current memctx object and the normalized path of
    requested file, relative to repository root. It is fired by the
    commit function for every file in 'files', but calls order is
    undefined. If the file is available in the revision being
    committed (updated or added), filectxfn returns a memfilectx
    object. If the file was removed, filectxfn raises an
    IOError. Moved files are represented by marking the source file
    removed and the new file added with copy information (see
    memfilectx).

    user receives the committer name and defaults to current
    repository username, date is the commit date in any format
    supported by util.parsedate() and defaults to current date, extra
    is a dictionary of metadata or is left empty.
    """

    # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files.
    # Extensions that need to retain compatibility across Mercurial 3.1 can use
    # this field to determine what to do in filectxfn.
    _returnnoneformissingfiles = True

    def __init__(self, repo, parents, text, files, filectxfn, user=None,
                 date=None, extra=None, editor=False):
        super(memctx, self).__init__(repo, text, user, date, extra)
        self._rev = None
        self._node = None
        # normalize missing parents (None) to nullid
        parents = [(p or nullid) for p in parents]
        p1, p2 = parents
        self._parents = [changectx(self._repo, p) for p in (p1, p2)]
        files = sorted(set(files))
        self._files = files
        self.substate = {}

        # if store is not callable, wrap it in a function
        if not callable(filectxfn):
            def getfilectx(repo, memctx, path):
                fctx = filectxfn[path]
                # this is weird but apparently we only keep track of one parent
                # (why not only store that instead of a tuple?)
                copied = fctx.renamed()
                if copied:
                    copied = copied[0]
                return memfilectx(repo, path, fctx.data(),
                                  islink=fctx.islink(), isexec=fctx.isexec(),
                                  copied=copied, memctx=memctx)
            self._filectxfn = getfilectx
        else:
            # memoizing increases performance for e.g. vcs convert scenarios.
            self._filectxfn = makecachingfilectxfn(filectxfn)

        if extra:
            self._extra = extra.copy()
        else:
            self._extra = {}

        if self._extra.get('branch', '') == '':
            self._extra['branch'] = 'default'

        if editor:
            self._text = editor(self._repo, self, [])
            self._repo.savecommitmessage(self._text)

    def filectx(self, path, filelog=None):
        """get a file context from the working directory

        Returns None if file doesn't exist and should be removed."""
        return self._filectxfn(self._repo, self, path)

    def commit(self):
        """commit context to the repo"""
        return self._repo.commitctx(self)

    @propertycache
    def _manifest(self):
        """generate a manifest based on the return values of filectxfn"""

        # keep this simple for now; just worry about p1
        pctx = self._parents[0]
        man = pctx.manifest().copy()

        for f in self._status.modified:
            p1node = nullid
            p2node = nullid
            p = pctx[f].parents() # if file isn't in pctx, check p2?
            if len(p) > 0:
                p1node = p[0].filenode()
            if len(p) > 1:
                p2node = p[1].filenode()
            man[f] = revlog.hash(self[f].data(), p1node, p2node)

        for f in self._status.added:
            # added files have no filelog parents
            man[f] = revlog.hash(self[f].data(), nullid, nullid)

        for f in self._status.removed:
            if f in man:
                del man[f]

        return man

    @propertycache
    def _status(self):
        """Calculate exact status from ``files`` specified at construction
        """
        man1 = self.p1().manifest()
        p2 = self._parents[1]
        # "1 < len(self._parents)" can't be used for checking
        # existence of the 2nd parent, because "memctx._parents" is
        # explicitly initialized by the list, of which length is 2.
        if p2.node() != nullid:
            man2 = p2.manifest()
            managing = lambda f: f in man1 or f in man2
        else:
            managing = lambda f: f in man1

        modified, added, removed = [], [], []
        for f in self._files:
            if not managing(f):
                # not in any parent manifest: an addition
                added.append(f)
            elif self[f]:
                # filectxfn yields a file context: a modification
                modified.append(f)
            else:
                # filectxfn yields nothing: a removal
                removed.append(f)

        return scmutil.status(modified, added, removed, [], [], [], [])
1942
1942
class memfilectx(committablefilectx):
    """memfilectx represents an in-memory file to commit.

    See memctx and committablefilectx for more details.
    """
    def __init__(self, repo, path, data, islink=False,
                 isexec=False, copied=None, memctx=None):
        """
        path is the normalized file path relative to repository root.
        data is the file content as a string.
        islink is True if the file is a symbolic link.
        isexec is True if the file is executable.
        copied is the source file path if current file was copied in the
        revision being committed, or None."""
        super(memfilectx, self).__init__(repo, path, None, memctx)
        self._data = data
        # build the flag string from the link/exec booleans
        self._flags = ('l' if islink else '') + ('x' if isexec else '')
        self._copied = (copied, nullid) if copied else None

    def data(self):
        """Return the in-memory file content."""
        return self._data

    def size(self):
        """Return the length of the in-memory content."""
        return len(self.data())

    def flags(self):
        """Return the flag string ('l' and/or 'x') set at construction."""
        return self._flags

    def renamed(self):
        """Return (source path, nullid) when copied, else None."""
        return self._copied

    def remove(self, ignoremissing=False):
        """wraps unlink for a repo's working directory"""
        # need to figure out what to do here
        del self._changectx[self._path]

    def write(self, data, flags):
        """wraps repo.wwrite"""
        # NOTE(review): 'flags' is not stored here; the flags chosen at
        # construction time remain in effect
        self._data = data
@@ -1,1008 +1,1012
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12 import os
12 import os
13 import shutil
13 import shutil
14
14
15 from .i18n import _
15 from .i18n import _
16 from .node import nullid
16 from .node import nullid
17
17
18 from . import (
18 from . import (
19 bookmarks,
19 bookmarks,
20 bundlerepo,
20 bundlerepo,
21 cmdutil,
21 cmdutil,
22 destutil,
22 destutil,
23 discovery,
23 discovery,
24 error,
24 error,
25 exchange,
25 exchange,
26 extensions,
26 extensions,
27 httppeer,
27 httppeer,
28 localrepo,
28 localrepo,
29 lock,
29 lock,
30 merge as mergemod,
30 merge as mergemod,
31 node,
31 node,
32 phases,
32 phases,
33 repoview,
33 repoview,
34 scmutil,
34 scmutil,
35 sshpeer,
35 sshpeer,
36 statichttprepo,
36 statichttprepo,
37 ui as uimod,
37 ui as uimod,
38 unionrepo,
38 unionrepo,
39 url,
39 url,
40 util,
40 util,
41 verify as verifymod,
41 verify as verifymod,
42 )
42 )
43
43
# convenience alias for releasing locks; used by the error paths below
release = lock.release
45
45
def _local(path):
    """Return the repo module handling a local path: bundlerepo when the
    path is a plain file (a bundle), localrepo otherwise."""
    expanded = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(expanded):
        return bundlerepo
    return localrepo
49
49
def addbranchrevs(lrepo, other, branches, revs):
    """Expand branch names into revisions using a peer's branchmap.

    lrepo is a local repo used to resolve the '.' branch; other is the
    peer (a localrepo is also accepted and wrapped below); branches is a
    (hashbranch, branches) pair as produced by parseurl(); revs is an
    optional base list of revisions.

    Returns (revs, checkout): the expanded revision list and the
    revision to check out (or None).
    """
    peer = other.peer() # a courtesy to callers using a localrepo for other
    hashbranch, branches = branches
    if not hashbranch and not branches:
        # no branches requested: pass revs through, using the first
        # revision (if any) as the checkout
        x = revs or None
        if util.safehasattr(revs, 'first'):
            y = revs.first()
        elif revs:
            y = revs[0]
        else:
            y = None
        return x, y
    if revs:
        revs = list(revs)
    else:
        revs = []

    if not peer.capable('branchmap'):
        # without branchmap support we can only treat hashbranch as a
        # plain revision; named branches cannot be resolved
        if branches:
            raise error.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]
    branchmap = peer.branchmap()

    def primary(branch):
        # resolve branch to its heads and append them to revs (newest
        # first); return False when the branch is not in the branchmap
        if branch == '.':
            if not lrepo:
                raise error.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
            return True
        else:
            return False

    for branch in branches:
        if not primary(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # hashbranch may be a branch name or a raw revision; fall back
        # to treating it as a revision when it is not a known branch
        if not primary(hashbranch):
            revs.append(hashbranch)
    return revs, revs[0]
92
92
def parseurl(path, branches=None):
    """Split a url of the form url#branch.

    Returns (url, (branch, branches)) where branch is the URL fragment
    (or None when absent) and branches defaults to an empty list.
    """
    u = util.url(path)
    fragment = u.fragment or None
    if fragment is not None:
        u.fragment = None
    return str(u), (fragment, branches or [])
102
102
# map of URL scheme -> module (or callable) providing repository/peer
# instances for that scheme; 'file' handles scheme-less paths and
# dispatches to bundlerepo or localrepo via _local
schemes = {
    'bundle': bundlerepo,
    'union': unionrepo,
    'file': _local,
    'http': httppeer,
    'https': httppeer,
    'ssh': sshpeer,
    'static-http': statichttprepo,
}
112
112
def _peerlookup(path):
    """Return the scheme handler for path.

    The handler registered for the URL scheme (falling back to 'file')
    is returned directly when it is a module; callable handlers such as
    _local are invoked with the path and their result returned.
    """
    parsed = util.url(path)
    handler = schemes.get(parsed.scheme or 'file') or schemes['file']
    try:
        return handler(path)
    except TypeError:
        # we can't test callable(handler) because 'handler' can be an
        # unloaded module that implements __call__; duck-type on the
        # 'instance' attribute that repo modules provide instead
        if util.safehasattr(handler, 'instance'):
            return handler
        raise
125
125
def islocal(repo):
    """Return true if repo (an object, or a path string pointing to a
    repo) is local."""
    if not isinstance(repo, str):
        return repo.local()
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        # the handler for this scheme has no islocal(): treat as remote
        return False
134
134
def openpath(ui, path):
    """Open path for reading: a posix file for local paths, url.open
    for remote ones."""
    parsed = util.url(path, parsequery=False, parsefragment=False)
    if not parsed.islocal():
        return url.open(ui, path)
    return util.posixfile(parsed.localpath(), 'rb')
142
142
# a list of (ui, repo) functions called for wire peer initialization;
# _peerorrepo() runs each of these on objects that are not local
wirepeersetupfuncs = []
145
145
def _peerorrepo(ui, path, create=False):
    """Return a repository (or peer) object for the specified path,
    with extension reposetup hooks and wire-peer setup applied."""
    obj = _peerlookup(path).instance(ui, path, create)
    # prefer the object's own ui when it has one
    ui = getattr(obj, "ui", ui)
    for _name, module in extensions.extensions(ui):
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, obj)
    if not obj.local():
        for setupfunc in wirepeersetupfuncs:
            setupfunc(ui, obj)
    return obj
158
158
def repository(ui, path='', create=False):
    """Return a local repository object for path, filtered to 'visible'.

    Aborts when path refers to a repository that is not local.
    """
    obj = _peerorrepo(ui, path, create)
    repo = obj.local()
    if not repo:
        raise error.Abort(_("repository '%s' is not local")
                          % (path or obj.url()))
    return repo.filtered('visible')
167
167
def peer(uiorrepo, opts, path, create=False):
    """Return a repository peer for the specified path, using a remote
    ui derived from uiorrepo and opts."""
    return _peerorrepo(remoteui(uiorrepo, opts), path, create).peer()
172
172
def defaultdest(source):
    """Return the default destination of a clone if none is given.

    This is the basename of the normalized path component of the source
    URL, or an empty string when there is no usable path.

    >>> defaultdest('foo')
    'foo'
    >>> defaultdest('/foo/bar')
    'bar'
    >>> defaultdest('/')
    ''
    >>> defaultdest('')
    ''
    >>> defaultdest('http://example.org/')
    ''
    >>> defaultdest('http://example.org/foo/')
    'foo'
    """
    path = util.url(source).path
    return os.path.basename(os.path.normpath(path)) if path else ''
193
193
def share(ui, source, dest=None, update=True, bookmarks=True):
    '''create a shared repository

    source may be a path string or a repo object; dest defaults to the
    basename of source.  When ``update`` is set, the new working
    directory is updated (see _postshareupdate); when ``bookmarks`` is
    set, bookmarks are shared with the source (see postshare).
    '''

    if not islocal(source):
        raise error.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, str):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        srcrepo = source.local()
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    destwvfs = scmutil.vfs(dest, realpath=True)
    destvfs = scmutil.vfs(os.path.join(destwvfs.base, '.hg'), realpath=True)

    if destvfs.lexists():
        raise error.Abort(_('destination already exists'))

    if not destwvfs.isdir():
        destwvfs.mkdir()
    destvfs.makedir()

    # the destination requires everything the source does, plus 'shared'
    requirements = ''
    try:
        requirements = srcrepo.vfs.read('requires')
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise

    requirements += 'shared\n'
    destvfs.write('requires', requirements)
    # record where the shared store actually lives
    destvfs.write('sharedpath', sharedpath)

    r = repository(ui, destwvfs.base)
    postshare(srcrepo, r, bookmarks=bookmarks)
    _postshareupdate(r, update, checkout=checkout)
241
241
def postshare(sourcerepo, destrepo, bookmarks=True):
    """Called after a new shared repo is created.

    The new repo only has a requirements file and pointer to the source.
    This function configures additional shared data.

    Extensions can wrap this function and write additional entries to
    destrepo/.hg/shared to indicate additional pieces of data to be shared.
    """
    default = sourcerepo.ui.config('paths', 'default')
    if default:
        # record the source as the default path for future pull/push;
        # try/finally ensures the handle is closed even if a write fails
        fp = destrepo.vfs("hgrc", "w", text=True)
        try:
            fp.write("[paths]\n")
            fp.write("default = %s\n" % default)
        finally:
            fp.close()

    if bookmarks:
        # mark bookmarks as shared with the source repository
        fp = destrepo.vfs('shared', 'w')
        try:
            fp.write('bookmarks\n')
        finally:
            fp.close()
262
262
def _postshareupdate(repo, update, checkout=None):
    """Maybe perform a working directory update after a shared repo is created.

    ``update`` can be a boolean or a revision to update to.
    """
    if not update:
        return

    repo.ui.status(_("updating working directory\n"))
    if update is not True:
        checkout = update
    # try the requested checkout first, then fall back to default/tip
    candidates = [rev for rev in (checkout, 'default', 'tip')
                  if rev is not None]
    for candidate in candidates:
        try:
            uprev = repo.lookup(candidate)
            break
        except error.RepoLookupError:
            pass
    _update(repo, uprev)
283
283
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    destlock = None
    try:
        hardlink = None
        num = 0
        closetopic = [None]
        def prog(topic, pos):
            # progress callback handed to util.copyfiles; a None pos
            # means the topic finished -- remember it so it can be
            # closed out after the copy loop
            if pos is None:
                closetopic[0] = topic
            else:
                ui.progress(topic, pos + num)
        srcpublishing = srcrepo.publishing()
        srcvfs = scmutil.vfs(srcrepo.sharedpath)
        dstvfs = scmutil.vfs(destpath)
        for f in srcrepo.store.copylist():
            # skip phaseroots when the source repo is publishing
            if srcpublishing and f.endswith('phaseroots'):
                continue
            dstbase = os.path.dirname(f)
            if dstbase and not dstvfs.exists(dstbase):
                dstvfs.mkdir(dstbase)
            if srcvfs.exists(f):
                if f.endswith('data'):
                    # 'dstbase' may be empty (e.g. revlog format 0)
                    lockfile = os.path.join(dstbase, "lock")
                    # lock to avoid premature writing to the target
                    destlock = lock.lock(dstvfs, lockfile)
                hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
                                             hardlink, progress=prog)
                num += n
        if hardlink:
            ui.debug("linked %d files\n" % num)
            if closetopic[0]:
                ui.progress(closetopic[0], None)
        else:
            ui.debug("copied %d files\n" % num)
            if closetopic[0]:
                ui.progress(closetopic[0], None)
        return destlock
    except: # re-raises
        # make sure the target lock does not leak on failure; the
        # exception itself is propagated to the caller
        release(destlock)
        raise
329
329
def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False,
                   rev=None, update=True, stream=False):
    """Perform a clone using a shared repo.

    The store for the repository will be located at <sharepath>/.hg. The
    specified revisions will be cloned or pulled from "source". A shared repo
    will be created at "dest" and a working copy will be created if "update" is
    True.

    Returns (srcpeer, destpeer), like clone().
    """
    revs = None
    if rev:
        # resolve the requested revisions on the source up front
        if not srcpeer.capable('lookup'):
            raise error.Abort(_("src repository does not support "
                                "revision lookup and so doesn't "
                                "support clone by revision"))
        revs = [srcpeer.lookup(r) for r in rev]

    # Obtain a lock before checking for or cloning the pooled repo otherwise
    # 2 clients may race creating or populating it.
    pooldir = os.path.dirname(sharepath)
    # lock class requires the directory to exist.
    try:
        util.makedir(pooldir, False)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    poolvfs = scmutil.vfs(pooldir)
    basename = os.path.basename(sharepath)

    with lock.lock(poolvfs, '%s.lock' % basename):
        if os.path.exists(sharepath):
            ui.status(_('(sharing from existing pooled repository %s)\n') %
                      basename)
        else:
            ui.status(_('(sharing from new pooled repository %s)\n') % basename)
            # Always use pull mode because hardlinks in share mode don't work
            # well. Never update because working copies aren't necessary in
            # share mode.
            clone(ui, peeropts, source, dest=sharepath, pull=True,
                  rev=rev, update=False, stream=stream)

    sharerepo = repository(ui, path=sharepath)
    share(ui, sharerepo, dest=dest, update=False, bookmarks=False)

    # We need to perform a pull against the dest repo to fetch bookmarks
    # and other non-store data that isn't shared by default. In the case of
    # non-existing shared repo, this means we pull from the remote twice. This
    # is a bit weird. But at the time it was implemented, there wasn't an easy
    # way to pull just non-changegroup data.
    destrepo = repository(ui, path=dest)
    exchange.pull(destrepo, srcpeer, heads=revs)

    _postshareupdate(destrepo, update)

    return srcpeer, peer(ui, peeropts, dest)
386
386
387 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
387 def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
388 update=True, stream=False, branch=None, shareopts=None):
388 update=True, stream=False, branch=None, shareopts=None):
389 """Make a copy of an existing repository.
389 """Make a copy of an existing repository.
390
390
391 Create a copy of an existing repository in a new directory. The
391 Create a copy of an existing repository in a new directory. The
392 source and destination are URLs, as passed to the repository
392 source and destination are URLs, as passed to the repository
393 function. Returns a pair of repository peers, the source and
393 function. Returns a pair of repository peers, the source and
394 newly created destination.
394 newly created destination.
395
395
396 The location of the source is added to the new repository's
396 The location of the source is added to the new repository's
397 .hg/hgrc file, as the default to be used for future pulls and
397 .hg/hgrc file, as the default to be used for future pulls and
398 pushes.
398 pushes.
399
399
400 If an exception is raised, the partly cloned/updated destination
400 If an exception is raised, the partly cloned/updated destination
401 repository will be deleted.
401 repository will be deleted.
402
402
403 Arguments:
403 Arguments:
404
404
405 source: repository object or URL
405 source: repository object or URL
406
406
407 dest: URL of destination repository to create (defaults to base
407 dest: URL of destination repository to create (defaults to base
408 name of source repository)
408 name of source repository)
409
409
410 pull: always pull from source repository, even in local case or if the
410 pull: always pull from source repository, even in local case or if the
411 server prefers streaming
411 server prefers streaming
412
412
413 stream: stream raw data uncompressed from repository (fast over
413 stream: stream raw data uncompressed from repository (fast over
414 LAN, slow over WAN)
414 LAN, slow over WAN)
415
415
416 rev: revision to clone up to (implies pull=True)
416 rev: revision to clone up to (implies pull=True)
417
417
418 update: update working directory after clone completes, if
418 update: update working directory after clone completes, if
419 destination is local repository (True means update to default rev,
419 destination is local repository (True means update to default rev,
420 anything else is treated as a revision)
420 anything else is treated as a revision)
421
421
422 branch: branches to clone
422 branch: branches to clone
423
423
424 shareopts: dict of options to control auto sharing behavior. The "pool" key
424 shareopts: dict of options to control auto sharing behavior. The "pool" key
425 activates auto sharing mode and defines the directory for stores. The
425 activates auto sharing mode and defines the directory for stores. The
426 "mode" key determines how to construct the directory name of the shared
426 "mode" key determines how to construct the directory name of the shared
427 repository. "identity" means the name is derived from the node of the first
427 repository. "identity" means the name is derived from the node of the first
428 changeset in the repository. "remote" means the name is derived from the
428 changeset in the repository. "remote" means the name is derived from the
429 remote's path/URL. Defaults to "identity."
429 remote's path/URL. Defaults to "identity."
430 """
430 """
431
431
432 if isinstance(source, str):
432 if isinstance(source, str):
433 origsource = ui.expandpath(source)
433 origsource = ui.expandpath(source)
434 source, branch = parseurl(origsource, branch)
434 source, branch = parseurl(origsource, branch)
435 srcpeer = peer(ui, peeropts, source)
435 srcpeer = peer(ui, peeropts, source)
436 else:
436 else:
437 srcpeer = source.peer() # in case we were called with a localrepo
437 srcpeer = source.peer() # in case we were called with a localrepo
438 branch = (None, branch or [])
438 branch = (None, branch or [])
439 origsource = source = srcpeer.url()
439 origsource = source = srcpeer.url()
440 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
440 rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
441
441
442 if dest is None:
442 if dest is None:
443 dest = defaultdest(source)
443 dest = defaultdest(source)
444 if dest:
444 if dest:
445 ui.status(_("destination directory: %s\n") % dest)
445 ui.status(_("destination directory: %s\n") % dest)
446 else:
446 else:
447 dest = ui.expandpath(dest)
447 dest = ui.expandpath(dest)
448
448
449 dest = util.urllocalpath(dest)
449 dest = util.urllocalpath(dest)
450 source = util.urllocalpath(source)
450 source = util.urllocalpath(source)
451
451
452 if not dest:
452 if not dest:
453 raise error.Abort(_("empty destination path is not valid"))
453 raise error.Abort(_("empty destination path is not valid"))
454
454
455 destvfs = scmutil.vfs(dest, expandpath=True)
455 destvfs = scmutil.vfs(dest, expandpath=True)
456 if destvfs.lexists():
456 if destvfs.lexists():
457 if not destvfs.isdir():
457 if not destvfs.isdir():
458 raise error.Abort(_("destination '%s' already exists") % dest)
458 raise error.Abort(_("destination '%s' already exists") % dest)
459 elif destvfs.listdir():
459 elif destvfs.listdir():
460 raise error.Abort(_("destination '%s' is not empty") % dest)
460 raise error.Abort(_("destination '%s' is not empty") % dest)
461
461
462 shareopts = shareopts or {}
462 shareopts = shareopts or {}
463 sharepool = shareopts.get('pool')
463 sharepool = shareopts.get('pool')
464 sharenamemode = shareopts.get('mode')
464 sharenamemode = shareopts.get('mode')
465 if sharepool and islocal(dest):
465 if sharepool and islocal(dest):
466 sharepath = None
466 sharepath = None
467 if sharenamemode == 'identity':
467 if sharenamemode == 'identity':
468 # Resolve the name from the initial changeset in the remote
468 # Resolve the name from the initial changeset in the remote
469 # repository. This returns nullid when the remote is empty. It
469 # repository. This returns nullid when the remote is empty. It
470 # raises RepoLookupError if revision 0 is filtered or otherwise
470 # raises RepoLookupError if revision 0 is filtered or otherwise
471 # not available. If we fail to resolve, sharing is not enabled.
471 # not available. If we fail to resolve, sharing is not enabled.
472 try:
472 try:
473 rootnode = srcpeer.lookup('0')
473 rootnode = srcpeer.lookup('0')
474 if rootnode != node.nullid:
474 if rootnode != node.nullid:
475 sharepath = os.path.join(sharepool, node.hex(rootnode))
475 sharepath = os.path.join(sharepool, node.hex(rootnode))
476 else:
476 else:
477 ui.status(_('(not using pooled storage: '
477 ui.status(_('(not using pooled storage: '
478 'remote appears to be empty)\n'))
478 'remote appears to be empty)\n'))
479 except error.RepoLookupError:
479 except error.RepoLookupError:
480 ui.status(_('(not using pooled storage: '
480 ui.status(_('(not using pooled storage: '
481 'unable to resolve identity of remote)\n'))
481 'unable to resolve identity of remote)\n'))
482 elif sharenamemode == 'remote':
482 elif sharenamemode == 'remote':
483 sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
483 sharepath = os.path.join(sharepool, util.sha1(source).hexdigest())
484 else:
484 else:
485 raise error.Abort('unknown share naming mode: %s' % sharenamemode)
485 raise error.Abort('unknown share naming mode: %s' % sharenamemode)
486
486
487 if sharepath:
487 if sharepath:
488 return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
488 return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
489 dest, pull=pull, rev=rev, update=update,
489 dest, pull=pull, rev=rev, update=update,
490 stream=stream)
490 stream=stream)
491
491
492 srclock = destlock = cleandir = None
492 srclock = destlock = cleandir = None
493 srcrepo = srcpeer.local()
493 srcrepo = srcpeer.local()
494 try:
494 try:
495 abspath = origsource
495 abspath = origsource
496 if islocal(origsource):
496 if islocal(origsource):
497 abspath = os.path.abspath(util.urllocalpath(origsource))
497 abspath = os.path.abspath(util.urllocalpath(origsource))
498
498
499 if islocal(dest):
499 if islocal(dest):
500 cleandir = dest
500 cleandir = dest
501
501
502 copy = False
502 copy = False
503 if (srcrepo and srcrepo.cancopy() and islocal(dest)
503 if (srcrepo and srcrepo.cancopy() and islocal(dest)
504 and not phases.hassecret(srcrepo)):
504 and not phases.hassecret(srcrepo)):
505 copy = not pull and not rev
505 copy = not pull and not rev
506
506
507 if copy:
507 if copy:
508 try:
508 try:
509 # we use a lock here because if we race with commit, we
509 # we use a lock here because if we race with commit, we
510 # can end up with extra data in the cloned revlogs that's
510 # can end up with extra data in the cloned revlogs that's
511 # not pointed to by changesets, thus causing verify to
511 # not pointed to by changesets, thus causing verify to
512 # fail
512 # fail
513 srclock = srcrepo.lock(wait=False)
513 srclock = srcrepo.lock(wait=False)
514 except error.LockError:
514 except error.LockError:
515 copy = False
515 copy = False
516
516
517 if copy:
517 if copy:
518 srcrepo.hook('preoutgoing', throw=True, source='clone')
518 srcrepo.hook('preoutgoing', throw=True, source='clone')
519 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
519 hgdir = os.path.realpath(os.path.join(dest, ".hg"))
520 if not os.path.exists(dest):
520 if not os.path.exists(dest):
521 os.mkdir(dest)
521 os.mkdir(dest)
522 else:
522 else:
523 # only clean up directories we create ourselves
523 # only clean up directories we create ourselves
524 cleandir = hgdir
524 cleandir = hgdir
525 try:
525 try:
526 destpath = hgdir
526 destpath = hgdir
527 util.makedir(destpath, notindexed=True)
527 util.makedir(destpath, notindexed=True)
528 except OSError as inst:
528 except OSError as inst:
529 if inst.errno == errno.EEXIST:
529 if inst.errno == errno.EEXIST:
530 cleandir = None
530 cleandir = None
531 raise error.Abort(_("destination '%s' already exists")
531 raise error.Abort(_("destination '%s' already exists")
532 % dest)
532 % dest)
533 raise
533 raise
534
534
535 destlock = copystore(ui, srcrepo, destpath)
535 destlock = copystore(ui, srcrepo, destpath)
536 # copy bookmarks over
536 # copy bookmarks over
537 srcbookmarks = srcrepo.join('bookmarks')
537 srcbookmarks = srcrepo.join('bookmarks')
538 dstbookmarks = os.path.join(destpath, 'bookmarks')
538 dstbookmarks = os.path.join(destpath, 'bookmarks')
539 if os.path.exists(srcbookmarks):
539 if os.path.exists(srcbookmarks):
540 util.copyfile(srcbookmarks, dstbookmarks)
540 util.copyfile(srcbookmarks, dstbookmarks)
541
541
542 # Recomputing branch cache might be slow on big repos,
542 # Recomputing branch cache might be slow on big repos,
543 # so just copy it
543 # so just copy it
544 def copybranchcache(fname):
544 def copybranchcache(fname):
545 srcbranchcache = srcrepo.join('cache/%s' % fname)
545 srcbranchcache = srcrepo.join('cache/%s' % fname)
546 dstbranchcache = os.path.join(dstcachedir, fname)
546 dstbranchcache = os.path.join(dstcachedir, fname)
547 if os.path.exists(srcbranchcache):
547 if os.path.exists(srcbranchcache):
548 if not os.path.exists(dstcachedir):
548 if not os.path.exists(dstcachedir):
549 os.mkdir(dstcachedir)
549 os.mkdir(dstcachedir)
550 util.copyfile(srcbranchcache, dstbranchcache)
550 util.copyfile(srcbranchcache, dstbranchcache)
551
551
552 dstcachedir = os.path.join(destpath, 'cache')
552 dstcachedir = os.path.join(destpath, 'cache')
553 # In local clones we're copying all nodes, not just served
553 # In local clones we're copying all nodes, not just served
554 # ones. Therefore copy all branch caches over.
554 # ones. Therefore copy all branch caches over.
555 copybranchcache('branch2')
555 copybranchcache('branch2')
556 for cachename in repoview.filtertable:
556 for cachename in repoview.filtertable:
557 copybranchcache('branch2-%s' % cachename)
557 copybranchcache('branch2-%s' % cachename)
558
558
559 # we need to re-init the repo after manually copying the data
559 # we need to re-init the repo after manually copying the data
560 # into it
560 # into it
561 destpeer = peer(srcrepo, peeropts, dest)
561 destpeer = peer(srcrepo, peeropts, dest)
562 srcrepo.hook('outgoing', source='clone',
562 srcrepo.hook('outgoing', source='clone',
563 node=node.hex(node.nullid))
563 node=node.hex(node.nullid))
564 else:
564 else:
565 try:
565 try:
566 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
566 destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
567 # only pass ui when no srcrepo
567 # only pass ui when no srcrepo
568 except OSError as inst:
568 except OSError as inst:
569 if inst.errno == errno.EEXIST:
569 if inst.errno == errno.EEXIST:
570 cleandir = None
570 cleandir = None
571 raise error.Abort(_("destination '%s' already exists")
571 raise error.Abort(_("destination '%s' already exists")
572 % dest)
572 % dest)
573 raise
573 raise
574
574
575 revs = None
575 revs = None
576 if rev:
576 if rev:
577 if not srcpeer.capable('lookup'):
577 if not srcpeer.capable('lookup'):
578 raise error.Abort(_("src repository does not support "
578 raise error.Abort(_("src repository does not support "
579 "revision lookup and so doesn't "
579 "revision lookup and so doesn't "
580 "support clone by revision"))
580 "support clone by revision"))
581 revs = [srcpeer.lookup(r) for r in rev]
581 revs = [srcpeer.lookup(r) for r in rev]
582 checkout = revs[0]
582 checkout = revs[0]
583 local = destpeer.local()
583 local = destpeer.local()
584 if local:
584 if local:
585 if not stream:
585 if not stream:
586 if pull:
586 if pull:
587 stream = False
587 stream = False
588 else:
588 else:
589 stream = None
589 stream = None
590 # internal config: ui.quietbookmarkmove
590 # internal config: ui.quietbookmarkmove
591 quiet = local.ui.backupconfig('ui', 'quietbookmarkmove')
591 quiet = local.ui.backupconfig('ui', 'quietbookmarkmove')
592 try:
592 try:
593 local.ui.setconfig(
593 local.ui.setconfig(
594 'ui', 'quietbookmarkmove', True, 'clone')
594 'ui', 'quietbookmarkmove', True, 'clone')
595 exchange.pull(local, srcpeer, revs,
595 exchange.pull(local, srcpeer, revs,
596 streamclonerequested=stream)
596 streamclonerequested=stream)
597 finally:
597 finally:
598 local.ui.restoreconfig(quiet)
598 local.ui.restoreconfig(quiet)
599 elif srcrepo:
599 elif srcrepo:
600 exchange.push(srcrepo, destpeer, revs=revs,
600 exchange.push(srcrepo, destpeer, revs=revs,
601 bookmarks=srcrepo._bookmarks.keys())
601 bookmarks=srcrepo._bookmarks.keys())
602 else:
602 else:
603 raise error.Abort(_("clone from remote to remote not supported")
603 raise error.Abort(_("clone from remote to remote not supported")
604 )
604 )
605
605
606 cleandir = None
606 cleandir = None
607
607
608 destrepo = destpeer.local()
608 destrepo = destpeer.local()
609 if destrepo:
609 if destrepo:
610 template = uimod.samplehgrcs['cloned']
610 template = uimod.samplehgrcs['cloned']
611 fp = destrepo.vfs("hgrc", "w", text=True)
611 fp = destrepo.vfs("hgrc", "w", text=True)
612 u = util.url(abspath)
612 u = util.url(abspath)
613 u.passwd = None
613 u.passwd = None
614 defaulturl = str(u)
614 defaulturl = str(u)
615 fp.write(template % defaulturl)
615 fp.write(template % defaulturl)
616 fp.close()
616 fp.close()
617
617
618 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
618 destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')
619
619
620 if update:
620 if update:
621 if update is not True:
621 if update is not True:
622 checkout = srcpeer.lookup(update)
622 checkout = srcpeer.lookup(update)
623 uprev = None
623 uprev = None
624 status = None
624 status = None
625 if checkout is not None:
625 if checkout is not None:
626 try:
626 try:
627 uprev = destrepo.lookup(checkout)
627 uprev = destrepo.lookup(checkout)
628 except error.RepoLookupError:
628 except error.RepoLookupError:
629 if update is not True:
629 if update is not True:
630 try:
630 try:
631 uprev = destrepo.lookup(update)
631 uprev = destrepo.lookup(update)
632 except error.RepoLookupError:
632 except error.RepoLookupError:
633 pass
633 pass
634 if uprev is None:
634 if uprev is None:
635 try:
635 try:
636 uprev = destrepo._bookmarks['@']
636 uprev = destrepo._bookmarks['@']
637 update = '@'
637 update = '@'
638 bn = destrepo[uprev].branch()
638 bn = destrepo[uprev].branch()
639 if bn == 'default':
639 if bn == 'default':
640 status = _("updating to bookmark @\n")
640 status = _("updating to bookmark @\n")
641 else:
641 else:
642 status = (_("updating to bookmark @ on branch %s\n")
642 status = (_("updating to bookmark @ on branch %s\n")
643 % bn)
643 % bn)
644 except KeyError:
644 except KeyError:
645 try:
645 try:
646 uprev = destrepo.branchtip('default')
646 uprev = destrepo.branchtip('default')
647 except error.RepoLookupError:
647 except error.RepoLookupError:
648 uprev = destrepo.lookup('tip')
648 uprev = destrepo.lookup('tip')
649 if not status:
649 if not status:
650 bn = destrepo[uprev].branch()
650 bn = destrepo[uprev].branch()
651 status = _("updating to branch %s\n") % bn
651 status = _("updating to branch %s\n") % bn
652 destrepo.ui.status(status)
652 destrepo.ui.status(status)
653 _update(destrepo, uprev)
653 _update(destrepo, uprev)
654 if update in destrepo._bookmarks:
654 if update in destrepo._bookmarks:
655 bookmarks.activate(destrepo, update)
655 bookmarks.activate(destrepo, update)
656 finally:
656 finally:
657 release(srclock, destlock)
657 release(srclock, destlock)
658 if cleandir is not None:
658 if cleandir is not None:
659 shutil.rmtree(cleandir, True)
659 shutil.rmtree(cleandir, True)
660 if srcpeer is not None:
660 if srcpeer is not None:
661 srcpeer.close()
661 srcpeer.close()
662 return srcpeer, destpeer
662 return srcpeer, destpeer
663
663
664 def _showstats(repo, stats, quietempty=False):
664 def _showstats(repo, stats, quietempty=False):
665 if quietempty and not any(stats):
665 if quietempty and not any(stats):
666 return
666 return
667 repo.ui.status(_("%d files updated, %d files merged, "
667 repo.ui.status(_("%d files updated, %d files merged, "
668 "%d files removed, %d files unresolved\n") % stats)
668 "%d files removed, %d files unresolved\n") % stats)
669
669
670 def updaterepo(repo, node, overwrite):
670 def updaterepo(repo, node, overwrite):
671 """Update the working directory to node.
671 """Update the working directory to node.
672
672
673 When overwrite is set, changes are clobbered, merged else
673 When overwrite is set, changes are clobbered, merged else
674
674
675 returns stats (see pydoc mercurial.merge.applyupdates)"""
675 returns stats (see pydoc mercurial.merge.applyupdates)"""
676 return mergemod.update(repo, node, False, overwrite,
676 return mergemod.update(repo, node, False, overwrite,
677 labels=['working copy', 'destination'])
677 labels=['working copy', 'destination'])
678
678
679 def update(repo, node, quietempty=False):
679 def update(repo, node, quietempty=False):
680 """update the working directory to node, merging linear changes"""
680 """update the working directory to node, merging linear changes"""
681 stats = updaterepo(repo, node, False)
681 stats = updaterepo(repo, node, False)
682 _showstats(repo, stats, quietempty)
682 _showstats(repo, stats, quietempty)
683 if stats[3]:
683 if stats[3]:
684 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
684 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
685 return stats[3] > 0
685 return stats[3] > 0
686
686
687 # naming conflict in clone()
687 # naming conflict in clone()
688 _update = update
688 _update = update
689
689
690 def clean(repo, node, show_stats=True, quietempty=False):
690 def clean(repo, node, show_stats=True, quietempty=False):
691 """forcibly switch the working directory to node, clobbering changes"""
691 """forcibly switch the working directory to node, clobbering changes"""
692 stats = updaterepo(repo, node, True)
692 stats = updaterepo(repo, node, True)
693 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
693 util.unlinkpath(repo.join('graftstate'), ignoremissing=True)
694 if show_stats:
694 if show_stats:
695 _showstats(repo, stats, quietempty)
695 _showstats(repo, stats, quietempty)
696 return stats[3] > 0
696 return stats[3] > 0
697
697
698 # naming conflict in updatetotally()
698 # naming conflict in updatetotally()
699 _clean = clean
699 _clean = clean
700
700
701 def updatetotally(ui, repo, checkout, brev, clean=False, check=False):
701 def updatetotally(ui, repo, checkout, brev, clean=False, check=False):
702 """Update the working directory with extra care for non-file components
702 """Update the working directory with extra care for non-file components
703
703
704 This takes care of non-file components below:
704 This takes care of non-file components below:
705
705
706 :bookmark: might be advanced or (in)activated
706 :bookmark: might be advanced or (in)activated
707
707
708 This takes arguments below:
708 This takes arguments below:
709
709
710 :checkout: to which revision the working directory is updated
710 :checkout: to which revision the working directory is updated
711 :brev: a name, which might be a bookmark to be activated after updating
711 :brev: a name, which might be a bookmark to be activated after updating
712 :clean: whether changes in the working directory can be discarded
712 :clean: whether changes in the working directory can be discarded
713 :check: whether changes in the working directory should be checked
713 :check: whether changes in the working directory should be checked
714
714
715 This returns whether conflict is detected at updating or not.
715 This returns whether conflict is detected at updating or not.
716 """
716 """
717 with repo.wlock():
717 with repo.wlock():
718 movemarkfrom = None
718 movemarkfrom = None
719 warndest = False
719 warndest = False
720 if checkout is None:
720 if checkout is None:
721 updata = destutil.destupdate(repo, clean=clean, check=check)
721 updata = destutil.destupdate(repo, clean=clean, check=check)
722 checkout, movemarkfrom, brev = updata
722 checkout, movemarkfrom, brev = updata
723 warndest = True
723 warndest = True
724
724
725 if clean:
725 if clean:
726 ret = _clean(repo, checkout)
726 ret = _clean(repo, checkout)
727 else:
727 else:
728 ret = _update(repo, checkout)
728 ret = _update(repo, checkout)
729
729
730 if not ret and movemarkfrom:
730 if not ret and movemarkfrom:
731 if movemarkfrom == repo['.'].node():
731 if movemarkfrom == repo['.'].node():
732 pass # no-op update
732 pass # no-op update
733 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
733 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
734 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
734 ui.status(_("updating bookmark %s\n") % repo._activebookmark)
735 else:
735 else:
736 # this can happen with a non-linear update
736 # this can happen with a non-linear update
737 ui.status(_("(leaving bookmark %s)\n") %
737 ui.status(_("(leaving bookmark %s)\n") %
738 repo._activebookmark)
738 repo._activebookmark)
739 bookmarks.deactivate(repo)
739 bookmarks.deactivate(repo)
740 elif brev in repo._bookmarks:
740 elif brev in repo._bookmarks:
741 if brev != repo._activebookmark:
741 if brev != repo._activebookmark:
742 ui.status(_("(activating bookmark %s)\n") % brev)
742 ui.status(_("(activating bookmark %s)\n") % brev)
743 bookmarks.activate(repo, brev)
743 bookmarks.activate(repo, brev)
744 elif brev:
744 elif brev:
745 if repo._activebookmark:
745 if repo._activebookmark:
746 ui.status(_("(leaving bookmark %s)\n") %
746 ui.status(_("(leaving bookmark %s)\n") %
747 repo._activebookmark)
747 repo._activebookmark)
748 bookmarks.deactivate(repo)
748 bookmarks.deactivate(repo)
749
749
750 if warndest:
750 if warndest:
751 destutil.statusotherdests(ui, repo)
751 destutil.statusotherdests(ui, repo)
752
752
753 return ret
753 return ret
754
754
755 def merge(repo, node, force=None, remind=True, mergeforce=False):
755 def merge(repo, node, force=None, remind=True, mergeforce=False):
756 """Branch merge with node, resolving changes. Return true if any
756 """Branch merge with node, resolving changes. Return true if any
757 unresolved conflicts."""
757 unresolved conflicts."""
758 stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce)
758 stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce)
759 _showstats(repo, stats)
759 _showstats(repo, stats)
760 if stats[3]:
760 if stats[3]:
761 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
761 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
762 "or 'hg update -C .' to abandon\n"))
762 "or 'hg update -C .' to abandon\n"))
763 elif remind:
763 elif remind:
764 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
764 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
765 return stats[3] > 0
765 return stats[3] > 0
766
766
767 def _incoming(displaychlist, subreporecurse, ui, repo, source,
767 def _incoming(displaychlist, subreporecurse, ui, repo, source,
768 opts, buffered=False):
768 opts, buffered=False):
769 """
769 """
770 Helper for incoming / gincoming.
770 Helper for incoming / gincoming.
771 displaychlist gets called with
771 displaychlist gets called with
772 (remoterepo, incomingchangesetlist, displayer) parameters,
772 (remoterepo, incomingchangesetlist, displayer) parameters,
773 and is supposed to contain only code that can't be unified.
773 and is supposed to contain only code that can't be unified.
774 """
774 """
775 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
775 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
776 other = peer(repo, opts, source)
776 other = peer(repo, opts, source)
777 ui.status(_('comparing with %s\n') % util.hidepassword(source))
777 ui.status(_('comparing with %s\n') % util.hidepassword(source))
778 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
778 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
779
779
780 if revs:
780 if revs:
781 revs = [other.lookup(rev) for rev in revs]
781 revs = [other.lookup(rev) for rev in revs]
782 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
782 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
783 revs, opts["bundle"], opts["force"])
783 revs, opts["bundle"], opts["force"])
784 try:
784 try:
785 if not chlist:
785 if not chlist:
786 ui.status(_("no changes found\n"))
786 ui.status(_("no changes found\n"))
787 return subreporecurse()
787 return subreporecurse()
788
788
789 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
789 displayer = cmdutil.show_changeset(ui, other, opts, buffered)
790 displaychlist(other, chlist, displayer)
790 displaychlist(other, chlist, displayer)
791 displayer.close()
791 displayer.close()
792 finally:
792 finally:
793 cleanupfn()
793 cleanupfn()
794 subreporecurse()
794 subreporecurse()
795 return 0 # exit code is zero since we found incoming changes
795 return 0 # exit code is zero since we found incoming changes
796
796
797 def incoming(ui, repo, source, opts):
797 def incoming(ui, repo, source, opts):
798 def subreporecurse():
798 def subreporecurse():
799 ret = 1
799 ret = 1
800 if opts.get('subrepos'):
800 if opts.get('subrepos'):
801 ctx = repo[None]
801 ctx = repo[None]
802 for subpath in sorted(ctx.substate):
802 for subpath in sorted(ctx.substate):
803 sub = ctx.sub(subpath)
803 sub = ctx.sub(subpath)
804 ret = min(ret, sub.incoming(ui, source, opts))
804 ret = min(ret, sub.incoming(ui, source, opts))
805 return ret
805 return ret
806
806
807 def display(other, chlist, displayer):
807 def display(other, chlist, displayer):
808 limit = cmdutil.loglimit(opts)
808 limit = cmdutil.loglimit(opts)
809 if opts.get('newest_first'):
809 if opts.get('newest_first'):
810 chlist.reverse()
810 chlist.reverse()
811 count = 0
811 count = 0
812 for n in chlist:
812 for n in chlist:
813 if limit is not None and count >= limit:
813 if limit is not None and count >= limit:
814 break
814 break
815 parents = [p for p in other.changelog.parents(n) if p != nullid]
815 parents = [p for p in other.changelog.parents(n) if p != nullid]
816 if opts.get('no_merges') and len(parents) == 2:
816 if opts.get('no_merges') and len(parents) == 2:
817 continue
817 continue
818 count += 1
818 count += 1
819 displayer.show(other[n])
819 displayer.show(other[n])
820 return _incoming(display, subreporecurse, ui, repo, source, opts)
820 return _incoming(display, subreporecurse, ui, repo, source, opts)
821
821
822 def _outgoing(ui, repo, dest, opts):
822 def _outgoing(ui, repo, dest, opts):
823 dest = ui.expandpath(dest or 'default-push', dest or 'default')
823 dest = ui.expandpath(dest or 'default-push', dest or 'default')
824 dest, branches = parseurl(dest, opts.get('branch'))
824 dest, branches = parseurl(dest, opts.get('branch'))
825 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
825 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
826 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
826 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
827 if revs:
827 if revs:
828 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
828 revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
829
829
830 other = peer(repo, opts, dest)
830 other = peer(repo, opts, dest)
831 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
831 outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs,
832 force=opts.get('force'))
832 force=opts.get('force'))
833 o = outgoing.missing
833 o = outgoing.missing
834 if not o:
834 if not o:
835 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
835 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
836 return o, other
836 return o, other
837
837
838 def outgoing(ui, repo, dest, opts):
838 def outgoing(ui, repo, dest, opts):
839 def recurse():
839 def recurse():
840 ret = 1
840 ret = 1
841 if opts.get('subrepos'):
841 if opts.get('subrepos'):
842 ctx = repo[None]
842 ctx = repo[None]
843 for subpath in sorted(ctx.substate):
843 for subpath in sorted(ctx.substate):
844 sub = ctx.sub(subpath)
844 sub = ctx.sub(subpath)
845 ret = min(ret, sub.outgoing(ui, dest, opts))
845 ret = min(ret, sub.outgoing(ui, dest, opts))
846 return ret
846 return ret
847
847
848 limit = cmdutil.loglimit(opts)
848 limit = cmdutil.loglimit(opts)
849 o, other = _outgoing(ui, repo, dest, opts)
849 o, other = _outgoing(ui, repo, dest, opts)
850 if not o:
850 if not o:
851 cmdutil.outgoinghooks(ui, repo, other, opts, o)
851 cmdutil.outgoinghooks(ui, repo, other, opts, o)
852 return recurse()
852 return recurse()
853
853
854 if opts.get('newest_first'):
854 if opts.get('newest_first'):
855 o.reverse()
855 o.reverse()
856 displayer = cmdutil.show_changeset(ui, repo, opts)
856 displayer = cmdutil.show_changeset(ui, repo, opts)
857 count = 0
857 count = 0
858 for n in o:
858 for n in o:
859 if limit is not None and count >= limit:
859 if limit is not None and count >= limit:
860 break
860 break
861 parents = [p for p in repo.changelog.parents(n) if p != nullid]
861 parents = [p for p in repo.changelog.parents(n) if p != nullid]
862 if opts.get('no_merges') and len(parents) == 2:
862 if opts.get('no_merges') and len(parents) == 2:
863 continue
863 continue
864 count += 1
864 count += 1
865 displayer.show(repo[n])
865 displayer.show(repo[n])
866 displayer.close()
866 displayer.close()
867 cmdutil.outgoinghooks(ui, repo, other, opts, o)
867 cmdutil.outgoinghooks(ui, repo, other, opts, o)
868 recurse()
868 recurse()
869 return 0 # exit code is zero since we found outgoing changes
869 return 0 # exit code is zero since we found outgoing changes
870
870
871 def verify(repo):
871 def verify(repo):
872 """verify the consistency of a repository"""
872 """verify the consistency of a repository"""
873 ret = verifymod.verify(repo)
873 ret = verifymod.verify(repo)
874
874
875 # Broken subrepo references in hidden csets don't seem worth worrying about,
875 # Broken subrepo references in hidden csets don't seem worth worrying about,
876 # since they can't be pushed/pulled, and --hidden can be used if they are a
876 # since they can't be pushed/pulled, and --hidden can be used if they are a
877 # concern.
877 # concern.
878
878
879 # pathto() is needed for -R case
879 # pathto() is needed for -R case
880 revs = repo.revs("filelog(%s)",
880 revs = repo.revs("filelog(%s)",
881 util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
881 util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
882
882
883 if revs:
883 if revs:
884 repo.ui.status(_('checking subrepo links\n'))
884 repo.ui.status(_('checking subrepo links\n'))
885 for rev in revs:
885 for rev in revs:
886 ctx = repo[rev]
886 ctx = repo[rev]
887 try:
887 try:
888 for subpath in ctx.substate:
888 for subpath in ctx.substate:
889 ret = ctx.sub(subpath).verify() or ret
889 try:
890 ret = (ctx.sub(subpath, allowcreate=False).verify()
891 or ret)
892 except error.RepoError as e:
893 repo.ui.warn(_('%s: %s\n') % (rev, e))
890 except Exception:
894 except Exception:
891 repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
895 repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
892 node.short(ctx.node()))
896 node.short(ctx.node()))
893
897
894 return ret
898 return ret
895
899
896 def remoteui(src, opts):
900 def remoteui(src, opts):
897 'build a remote ui from ui or repo and opts'
901 'build a remote ui from ui or repo and opts'
898 if util.safehasattr(src, 'baseui'): # looks like a repository
902 if util.safehasattr(src, 'baseui'): # looks like a repository
899 dst = src.baseui.copy() # drop repo-specific config
903 dst = src.baseui.copy() # drop repo-specific config
900 src = src.ui # copy target options from repo
904 src = src.ui # copy target options from repo
901 else: # assume it's a global ui object
905 else: # assume it's a global ui object
902 dst = src.copy() # keep all global options
906 dst = src.copy() # keep all global options
903
907
904 # copy ssh-specific options
908 # copy ssh-specific options
905 for o in 'ssh', 'remotecmd':
909 for o in 'ssh', 'remotecmd':
906 v = opts.get(o) or src.config('ui', o)
910 v = opts.get(o) or src.config('ui', o)
907 if v:
911 if v:
908 dst.setconfig("ui", o, v, 'copied')
912 dst.setconfig("ui", o, v, 'copied')
909
913
910 # copy bundle-specific options
914 # copy bundle-specific options
911 r = src.config('bundle', 'mainreporoot')
915 r = src.config('bundle', 'mainreporoot')
912 if r:
916 if r:
913 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
917 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
914
918
915 # copy selected local settings to the remote ui
919 # copy selected local settings to the remote ui
916 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
920 for sect in ('auth', 'hostfingerprints', 'http_proxy'):
917 for key, val in src.configitems(sect):
921 for key, val in src.configitems(sect):
918 dst.setconfig(sect, key, val, 'copied')
922 dst.setconfig(sect, key, val, 'copied')
919 v = src.config('web', 'cacerts')
923 v = src.config('web', 'cacerts')
920 if v == '!':
924 if v == '!':
921 dst.setconfig('web', 'cacerts', v, 'copied')
925 dst.setconfig('web', 'cacerts', v, 'copied')
922 elif v:
926 elif v:
923 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
927 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
924
928
925 return dst
929 return dst
926
930
927 # Files of interest
931 # Files of interest
928 # Used to check if the repository has changed looking at mtime and size of
932 # Used to check if the repository has changed looking at mtime and size of
929 # these files.
933 # these files.
930 foi = [('spath', '00changelog.i'),
934 foi = [('spath', '00changelog.i'),
931 ('spath', 'phaseroots'), # ! phase can change content at the same size
935 ('spath', 'phaseroots'), # ! phase can change content at the same size
932 ('spath', 'obsstore'),
936 ('spath', 'obsstore'),
933 ('path', 'bookmarks'), # ! bookmark can change content at the same size
937 ('path', 'bookmarks'), # ! bookmark can change content at the same size
934 ]
938 ]
935
939
936 class cachedlocalrepo(object):
940 class cachedlocalrepo(object):
937 """Holds a localrepository that can be cached and reused."""
941 """Holds a localrepository that can be cached and reused."""
938
942
939 def __init__(self, repo):
943 def __init__(self, repo):
940 """Create a new cached repo from an existing repo.
944 """Create a new cached repo from an existing repo.
941
945
942 We assume the passed in repo was recently created. If the
946 We assume the passed in repo was recently created. If the
943 repo has changed between when it was created and when it was
947 repo has changed between when it was created and when it was
944 turned into a cache, it may not refresh properly.
948 turned into a cache, it may not refresh properly.
945 """
949 """
946 assert isinstance(repo, localrepo.localrepository)
950 assert isinstance(repo, localrepo.localrepository)
947 self._repo = repo
951 self._repo = repo
948 self._state, self.mtime = self._repostate()
952 self._state, self.mtime = self._repostate()
949 self._filtername = repo.filtername
953 self._filtername = repo.filtername
950
954
951 def fetch(self):
955 def fetch(self):
952 """Refresh (if necessary) and return a repository.
956 """Refresh (if necessary) and return a repository.
953
957
954 If the cached instance is out of date, it will be recreated
958 If the cached instance is out of date, it will be recreated
955 automatically and returned.
959 automatically and returned.
956
960
957 Returns a tuple of the repo and a boolean indicating whether a new
961 Returns a tuple of the repo and a boolean indicating whether a new
958 repo instance was created.
962 repo instance was created.
959 """
963 """
960 # We compare the mtimes and sizes of some well-known files to
964 # We compare the mtimes and sizes of some well-known files to
961 # determine if the repo changed. This is not precise, as mtimes
965 # determine if the repo changed. This is not precise, as mtimes
962 # are susceptible to clock skew and imprecise filesystems and
966 # are susceptible to clock skew and imprecise filesystems and
963 # file content can change while maintaining the same size.
967 # file content can change while maintaining the same size.
964
968
965 state, mtime = self._repostate()
969 state, mtime = self._repostate()
966 if state == self._state:
970 if state == self._state:
967 return self._repo, False
971 return self._repo, False
968
972
969 repo = repository(self._repo.baseui, self._repo.url())
973 repo = repository(self._repo.baseui, self._repo.url())
970 if self._filtername:
974 if self._filtername:
971 self._repo = repo.filtered(self._filtername)
975 self._repo = repo.filtered(self._filtername)
972 else:
976 else:
973 self._repo = repo.unfiltered()
977 self._repo = repo.unfiltered()
974 self._state = state
978 self._state = state
975 self.mtime = mtime
979 self.mtime = mtime
976
980
977 return self._repo, True
981 return self._repo, True
978
982
979 def _repostate(self):
983 def _repostate(self):
980 state = []
984 state = []
981 maxmtime = -1
985 maxmtime = -1
982 for attr, fname in foi:
986 for attr, fname in foi:
983 prefix = getattr(self._repo, attr)
987 prefix = getattr(self._repo, attr)
984 p = os.path.join(prefix, fname)
988 p = os.path.join(prefix, fname)
985 try:
989 try:
986 st = os.stat(p)
990 st = os.stat(p)
987 except OSError:
991 except OSError:
988 st = os.stat(prefix)
992 st = os.stat(prefix)
989 state.append((st.st_mtime, st.st_size))
993 state.append((st.st_mtime, st.st_size))
990 maxmtime = max(maxmtime, st.st_mtime)
994 maxmtime = max(maxmtime, st.st_mtime)
991
995
992 return tuple(state), maxmtime
996 return tuple(state), maxmtime
993
997
994 def copy(self):
998 def copy(self):
995 """Obtain a copy of this class instance.
999 """Obtain a copy of this class instance.
996
1000
997 A new localrepository instance is obtained. The new instance should be
1001 A new localrepository instance is obtained. The new instance should be
998 completely independent of the original.
1002 completely independent of the original.
999 """
1003 """
1000 repo = repository(self._repo.baseui, self._repo.origroot)
1004 repo = repository(self._repo.baseui, self._repo.origroot)
1001 if self._filtername:
1005 if self._filtername:
1002 repo = repo.filtered(self._filtername)
1006 repo = repo.filtered(self._filtername)
1003 else:
1007 else:
1004 repo = repo.unfiltered()
1008 repo = repo.unfiltered()
1005 c = cachedlocalrepo(repo)
1009 c = cachedlocalrepo(repo)
1006 c._state = self._state
1010 c._state = self._state
1007 c.mtime = self.mtime
1011 c.mtime = self.mtime
1008 return c
1012 return c
@@ -1,1947 +1,1947
1 # subrepo.py - sub-repository handling for Mercurial
1 # subrepo.py - sub-repository handling for Mercurial
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import os
12 import os
13 import posixpath
13 import posixpath
14 import re
14 import re
15 import stat
15 import stat
16 import subprocess
16 import subprocess
17 import sys
17 import sys
18 import tarfile
18 import tarfile
19 import xml.dom.minidom
19 import xml.dom.minidom
20
20
21
21
22 from .i18n import _
22 from .i18n import _
23 from . import (
23 from . import (
24 cmdutil,
24 cmdutil,
25 config,
25 config,
26 error,
26 error,
27 exchange,
27 exchange,
28 match as matchmod,
28 match as matchmod,
29 node,
29 node,
30 pathutil,
30 pathutil,
31 phases,
31 phases,
32 scmutil,
32 scmutil,
33 util,
33 util,
34 )
34 )
35
35
# placeholder for the lazily imported mercurial.hg module; subrepo() and
# nullsubrepo() fill it in to avoid a circular import at load time
hg = None
propertycache = util.propertycache

# (source, revision, kind) tuple representing an absent/empty subrepo
nullstate = ('', '', 'empty')
40
40
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expanded = util.urllocalpath(util.expandpath(path))
    u = util.url(expanded)
    if u.scheme:
        # a real URL: leave the caller's value untouched
        return path
    return util.normpath(os.path.abspath(u.path))
50
50
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    # hash the normalized absolute path and keep a short, filename-safe prefix
    digest = util.sha1(_expandedabspath(remotepath)).hexdigest()
    return digest[:12]
54
54
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""

    def __init__(self, *args, **kw):
        # remember which subrepo failed and the original exc_info triple
        self.subrepo = kw.get('subrepo')
        self.cause = kw.get('cause')
        error.Abort.__init__(self, *args, **kw)
61
61
def annotatesubrepoerror(func):
    """Decorator annotating Abort errors raised in *func* with the subrepo.

    A plain error.Abort raised inside the wrapped method is re-raised as a
    SubrepoAbort carrying the subrepo's relative path and the original
    exc_info; a SubrepoAbort passes through untouched so the annotation
    happens at most once.
    """
    def decoratedmethod(self, *args, **kargs):
        try:
            return func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
    return decoratedmethod
77
77
def state(ctx, ui):
    """return a state dict, mapping subrepo paths configured in .hgsub
    to tuple: (source from .hgsub, revision from .hgsubstate, kind
    (key in types dict))
    """
    p = config.config()
    repo = ctx.repo()
    # parse a spec file from ctx into the config object ``p``; passed
    # recursively to config.parse so %include'd files resolve via ctx too
    def read(f, sections=None, remap=None):
        if f in ctx:
            try:
                data = ctx[f].data()
            except IOError as err:
                if err.errno != errno.ENOENT:
                    raise
                # handle missing subrepo spec files as removed
                ui.warn(_("warning: subrepo spec file \'%s\' not found\n") %
                        repo.pathto(f))
                return
            p.parse(f, data, sections, remap, read)
        else:
            raise error.Abort(_("subrepo spec file \'%s\' not found") %
                              repo.pathto(f))
    if '.hgsub' in ctx:
        read('.hgsub')

    # overlay user-configured [subpaths] rewrite rules on top of .hgsub's
    for path, src in ui.configitems('subpaths'):
        p.set('subpaths', path, src, ui.configsource('subpaths', path))

    # map subrepo path -> pinned revision, from .hgsubstate
    rev = {}
    if '.hgsubstate' in ctx:
        try:
            for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
                l = l.lstrip()
                if not l:
                    continue
                try:
                    revision, path = l.split(" ", 1)
                except ValueError:
                    raise error.Abort(_("invalid subrepository revision "
                                        "specifier in \'%s\' line %d")
                                      % (repo.pathto('.hgsubstate'), (i + 1)))
                rev[path] = revision
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise

    # apply every [subpaths] pattern (at most once each) to ``src``
    def remap(src):
        for pattern, repl in p.items('subpaths'):
            # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
            # does a string decode.
            repl = repl.encode('string-escape')
            # However, we still want to allow back references to go
            # through unharmed, so we turn r'\\1' into r'\1'. Again,
            # extra escapes are needed because re.sub string decodes.
            repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl)
            try:
                src = re.sub(pattern, repl, src, 1)
            except re.error as e:
                raise error.Abort(_("bad subrepository pattern in %s: %s")
                                  % (p.source('subpaths', pattern), e))
        return src

    state = {}
    for path, src in p[''].items():
        kind = 'hg'
        # optional '[kind]source' prefix selects the subrepo type
        if src.startswith('['):
            if ']' not in src:
                raise error.Abort(_('missing ] in subrepo source'))
            kind, src = src.split(']', 1)
            kind = kind[1:]
            src = src.lstrip() # strip any extra whitespace after ']'

        if not util.url(src).isabs():
            parent = _abssource(repo, abort=False)
            if parent:
                parent = util.url(parent)
                parent.path = posixpath.join(parent.path or '', src)
                parent.path = posixpath.normpath(parent.path)
                joined = str(parent)
                # Remap the full joined path and use it if it changes,
                # else remap the original source.
                remapped = remap(joined)
                if remapped == joined:
                    src = remap(src)
                else:
                    src = remapped

        src = remap(src)
        state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)

    return state
169
169
def writestate(repo, state):
    """rewrite .hgsubstate in (outer) repo with these subrepo states"""
    lines = []
    # one 'revision path' line per subrepo, sorted by path; entries still
    # at the null revision are omitted
    for s in sorted(state):
        if state[s][1] != nullstate[1]:
            lines.append('%s %s\n' % (state[s][1], s))
    repo.wwrite('.hgsubstate', ''.join(lines), '')
175
175
def submerge(repo, wctx, mctx, actx, overwrite):
    """delegated from merge.applyupdates: merging of .hgsubstate file
    in working context, merging context and ancestor context"""
    if mctx == actx: # backwards?
        actx = wctx.p1()
    s1 = wctx.substate
    s2 = mctx.substate
    sa = actx.substate
    sm = {}  # merged substate being accumulated

    repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx))

    def debug(s, msg, r=""):
        if r:
            r = "%s:%s:%s" % r
        repo.ui.debug("  subrepo %s: %s %s\n" % (s, msg, r))

    # first pass: every subrepo present locally
    for s, l in sorted(s1.iteritems()):
        a = sa.get(s, nullstate)
        ld = l # local state with possible dirty flag for compares
        if wctx.sub(s).dirty():
            ld = (l[0], l[1] + "+")
        if wctx == actx: # overwrite
            a = ld

        if s in s2:
            r = s2[s]
            if ld == r or r == a: # no change or local is newer
                sm[s] = l
                continue
            elif ld == a: # other side changed
                debug(s, "other changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            elif ld[0] != r[0]: # sources differ
                if repo.ui.promptchoice(
                    _(' subrepository sources for %s differ\n'
                      'use (l)ocal source (%s) or (r)emote source (%s)?'
                      '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0):
                    debug(s, "prompt changed, get", r)
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
            elif ld[1] == a[1]: # local side is unchanged
                debug(s, "other side changed, get", r)
                wctx.sub(s).get(r, overwrite)
                sm[s] = r
            else:
                # both sides changed: let the user pick merge/local/remote
                debug(s, "both sides changed")
                srepo = wctx.sub(s)
                option = repo.ui.promptchoice(
                    _(' subrepository %s diverged (local revision: %s, '
                      'remote revision: %s)\n'
                      '(M)erge, keep (l)ocal or keep (r)emote?'
                      '$$ &Merge $$ &Local $$ &Remote')
                    % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0)
                if option == 0:
                    wctx.sub(s).merge(r)
                    sm[s] = l
                    debug(s, "merge with", r)
                elif option == 1:
                    sm[s] = l
                    debug(s, "keep local subrepo revision", l)
                else:
                    wctx.sub(s).get(r, overwrite)
                    sm[s] = r
                    debug(s, "get remote subrepo revision", r)
        elif ld == a: # remote removed, local unchanged
            debug(s, "remote removed, remove")
            wctx.sub(s).remove()
        elif a == nullstate: # not present in remote or ancestor
            debug(s, "local added, keep")
            sm[s] = l
            continue
        else:
            if repo.ui.promptchoice(
                _(' local changed subrepository %s which remote removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0):
                debug(s, "prompt remove")
                wctx.sub(s).remove()

    # second pass: subrepos only present on the remote side
    for s, r in sorted(s2.items()):
        if s in s1:
            continue
        elif s not in sa:
            debug(s, "remote added, get", r)
            mctx.sub(s).get(r)
            sm[s] = r
        elif r != sa[s]:
            if repo.ui.promptchoice(
                _(' remote changed subrepository %s which local removed\n'
                  'use (c)hanged version or (d)elete?'
                  '$$ &Changed $$ &Delete') % s, 0) == 0:
                debug(s, "prompt recreate", r)
                mctx.sub(s).get(r)
                sm[s] = r

    # record merged .hgsubstate
    writestate(repo, sm)
    return sm
276
276
def _updateprompt(ui, sub, dirty, local, remote):
    """Prompt the user to choose the local or remote subrepo source.

    Returns the ui.promptchoice() index: 0 for local, 1 for remote.  The
    wording differs depending on whether the subrepo is dirty.
    """
    if dirty:
        fmt = _(' subrepository sources for %s differ\n'
                'use (l)ocal source (%s) or (r)emote source (%s)?'
                '$$ &Local $$ &Remote')
    else:
        fmt = _(' subrepository sources for %s differ (in checked out '
                'version)\n'
                'use (l)ocal source (%s) or (r)emote source (%s)?'
                '$$ &Local $$ &Remote')
    return ui.promptchoice(fmt % (subrelpath(sub), local, remote), 0)
290
290
def reporelpath(repo):
    """return path to this (sub)repo as seen from outermost repo"""
    # climb the _subparent chain to find the outermost repository
    outer = repo
    while util.safehasattr(outer, '_subparent'):
        outer = outer._subparent
    return repo.root[len(pathutil.normasprefix(outer.root)):]
297
297
def subrelpath(sub):
    """Return the path of ``sub`` as seen from the outermost repository."""
    return sub._relpath
301
301
def _abssource(repo, push=False, abort=True):
    """return pull/push path of repo - either based on parent repo .hgsub info
    or on the top repo config. Abort or return None if no source found."""
    if util.safehasattr(repo, '_subparent'):
        # this is a subrepo: resolve its source relative to the parent,
        # recursing until an absolute URL or the top repo is reached
        source = util.url(repo._subsource)
        if source.isabs():
            return str(source)
        source.path = posixpath.normpath(source.path)
        parent = _abssource(repo._subparent, push, abort=False)
        if parent:
            parent = util.url(util.pconvert(parent))
            parent.path = posixpath.join(parent.path or '', source.path)
            parent.path = posixpath.normpath(parent.path)
            return str(parent)
    else: # recursion reached top repo
        # precedence: explicit _subtoppath, then paths.default-push (for
        # push), then paths.default, then the share source
        if util.safehasattr(repo, '_subtoppath'):
            return repo._subtoppath
        if push and repo.ui.config('paths', 'default-push'):
            return repo.ui.config('paths', 'default-push')
        if repo.ui.config('paths', 'default'):
            return repo.ui.config('paths', 'default')
        if repo.shared():
            # chop off the .hg component to get the default path form
            return os.path.dirname(repo.sharedpath)
    if abort:
        raise error.Abort(_("default path for subrepository not found"))
328
328
def _sanitize(ui, vfs, ignore):
    """Remove potentially hostile 'hgrc' files under '.hg' directories.

    ``ignore`` is a lowercased directory name to prune from the walk.
    """
    for dirname, dirs, names in vfs.walk():
        # prune the first directory matching ``ignore`` (case-insensitive);
        # mutating ``dirs`` in place steers vfs.walk()
        for idx, sub in enumerate(dirs):
            if sub.lower() == ignore:
                del dirs[idx]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
342
342
def subrepo(ctx, path, allowwdir=False, allowcreate=True):
    """return instance of the right subrepo class for subrepo in path

    ``allowwdir``: use the working-directory revision from ``ctx`` instead
    of the revision recorded in .hgsubstate.
    ``allowcreate``: whether a missing on-disk subrepo may be initialized;
    passed through to the subrepo class constructor (callers like verify
    pass False so a missing subrepo is reported instead of created -
    issue5128).
    """
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    # reject paths escaping the repository before looking them up
    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    if state[2] not in types:
        raise error.Abort(_('unknown subrepo type %s') % state[2])
    if allowwdir:
        state = (state[0], ctx.subrev(path), state[2])
    return types[state[2]](ctx, path, state[:2], allowcreate)
360
360
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # subrepo inherently violates our import layering rules
    # because it wants to make repo objects from deep inside the stack
    # so we manually delay the circular imports to not break
    # scripts that don't use our demand-loading
    global hg
    from . import hg as h
    hg = h

    # reject paths escaping the repository before looking them up
    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    if state[2] not in types:
        raise error.Abort(_('unknown subrepo type %s') % state[2])
    subrev = ''
    if state[2] == 'hg':
        subrev = "0" * 40  # null revision for an hg subrepo
    # last argument: allowcreate=True (an empty subrepo may be initialized)
    return types[state[2]](pctx, path, (state[0], subrev), True)
379
379
def newcommitphase(ui, ctx):
    """Return the phase for a new commit of ``ctx``, honoring subrepos.

    The phases.checksubrepos config ('ignore', 'follow' or 'abort')
    controls what happens when a subrepo revision has a higher phase than
    the configured new-commit phase: ignore it, follow (adopt the subrepo's
    phase), or abort the commit.
    """
    targetphase = phases.newcommitphase(ui)
    substate = getattr(ctx, "substate", None)
    if not substate:
        # no subrepos: the plain new-commit phase applies
        return targetphase
    check = ui.config('phases', 'checksubrepos', 'follow')
    if check not in ('ignore', 'follow', 'abort'):
        raise error.Abort(_('invalid phases.checksubrepos configuration: %s')
                          % (check))
    if check == 'ignore':
        return targetphase
    # find the highest phase among all referenced subrepo revisions
    maxphase = phases.public
    maxsub = None
    for subpath in sorted(substate):
        sub = ctx.sub(subpath)
        subphase = sub.phase(substate[subpath][1])
        if subphase > maxphase:
            maxphase = subphase
            maxsub = subpath
    if targetphase >= maxphase:
        return targetphase
    if check == 'abort':
        raise error.Abort(_("can't commit in %s phase"
                            " conflicting %s from subrepository %s") %
                          (phases.phasenames[targetphase],
                           phases.phasenames[maxphase], maxsub))
    ui.warn(_("warning: changes are committed in"
              " %s phase from subrepository %s\n") %
            (phases.phasenames[maxphase], maxsub))
    return maxphase
410
410
411 # subrepo classes need to implement the following abstract class:
411 # subrepo classes need to implement the following abstract class:
412
412
413 class abstractsubrepo(object):
413 class abstractsubrepo(object):
414
414
415 def __init__(self, ctx, path):
415 def __init__(self, ctx, path):
416 """Initialize abstractsubrepo part
416 """Initialize abstractsubrepo part
417
417
418 ``ctx`` is the context referring this subrepository in the
418 ``ctx`` is the context referring this subrepository in the
419 parent repository.
419 parent repository.
420
420
421 ``path`` is the path to this subrepository as seen from
421 ``path`` is the path to this subrepository as seen from
422 innermost repository.
422 innermost repository.
423 """
423 """
424 self.ui = ctx.repo().ui
424 self.ui = ctx.repo().ui
425 self._ctx = ctx
425 self._ctx = ctx
426 self._path = path
426 self._path = path
427
427
    def storeclean(self, path):
        """
        returns true if the repository has not changed since it was last
        cloned from or pushed to a given repository.
        """
        # conservative base-class default: never assume the store is clean
        return False
434
434
    def dirty(self, ignoreupdate=False):
        """returns true if the dirstate of the subrepo is dirty or does not
        match current stored state. If ignoreupdate is true, only check
        whether the subrepo has uncommitted changes in its dirstate.
        """
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
441
441
442 def dirtyreason(self, ignoreupdate=False):
442 def dirtyreason(self, ignoreupdate=False):
443 """return reason string if it is ``dirty()``
443 """return reason string if it is ``dirty()``
444
444
445 Returned string should have enough information for the message
445 Returned string should have enough information for the message
446 of exception.
446 of exception.
447
447
448 This returns None, otherwise.
448 This returns None, otherwise.
449 """
449 """
450 if self.dirty(ignoreupdate=ignoreupdate):
450 if self.dirty(ignoreupdate=ignoreupdate):
451 return _("uncommitted changes in subrepository '%s'"
451 return _("uncommitted changes in subrepository '%s'"
452 ) % subrelpath(self)
452 ) % subrelpath(self)
453
453
454 def bailifchanged(self, ignoreupdate=False):
454 def bailifchanged(self, ignoreupdate=False):
455 """raise Abort if subrepository is ``dirty()``
455 """raise Abort if subrepository is ``dirty()``
456 """
456 """
457 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
457 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate)
458 if dirtyreason:
458 if dirtyreason:
459 raise error.Abort(dirtyreason)
459 raise error.Abort(dirtyreason)
460
460
    def basestate(self):
        """current working directory base state, disregarding .hgsubstate
        state and working directory modifications"""
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
465
465
    def checknested(self, path):
        """check if path is a subrepository within this repository"""
        # base implementation knows of no nested subrepos
        return False
469
469
    def commit(self, text, user, date):
        """commit the current changes to the subrepo with the given
        log message. Use given user and date if possible. Return the
        new state of the subrepo.
        """
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
476
476
    def phase(self, state):
        """returns phase of specified state in the subrepository.
        """
        # default: report the revision as public
        return phases.public
481
481
    def remove(self):
        """remove the subrepo

        (should verify the dirstate is not dirty first)
        """
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
488
488
    def get(self, state, overwrite=False):
        """run whatever commands are needed to put the subrepo into
        this state
        """
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
494
494
    def merge(self, state):
        """merge currently-saved state with the new state."""
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
498
498
    def push(self, opts):
        """perform whatever action is analogous to 'hg push'

        This may be a no-op on some systems.
        """
        # abstract: concrete subrepo types must implement this
        raise NotImplementedError
505
505
    def add(self, ui, match, prefix, explicitonly, **opts):
        # nothing added by default; returns the list of added files
        return []
508
508
    def addremove(self, matcher, prefix, opts, dry_run, similarity):
        # unsupported by default: warn and return a nonzero status
        self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
        return 1
512
512
513 def cat(self, match, prefix, **opts):
513 def cat(self, match, prefix, **opts):
514 return 1
514 return 1
515
515
516 def status(self, rev2, **opts):
516 def status(self, rev2, **opts):
517 return scmutil.status([], [], [], [], [], [], [])
517 return scmutil.status([], [], [], [], [], [], [])
518
518
519 def diff(self, ui, diffopts, node2, match, prefix, **opts):
519 def diff(self, ui, diffopts, node2, match, prefix, **opts):
520 pass
520 pass
521
521
522 def outgoing(self, ui, dest, opts):
522 def outgoing(self, ui, dest, opts):
523 return 1
523 return 1
524
524
525 def incoming(self, ui, source, opts):
525 def incoming(self, ui, source, opts):
526 return 1
526 return 1
527
527
528 def files(self):
528 def files(self):
529 """return filename iterator"""
529 """return filename iterator"""
530 raise NotImplementedError
530 raise NotImplementedError
531
531
532 def filedata(self, name):
532 def filedata(self, name):
533 """return file data"""
533 """return file data"""
534 raise NotImplementedError
534 raise NotImplementedError
535
535
536 def fileflags(self, name):
536 def fileflags(self, name):
537 """return file flags"""
537 """return file flags"""
538 return ''
538 return ''
539
539
540 def getfileset(self, expr):
540 def getfileset(self, expr):
541 """Resolve the fileset expression for this repo"""
541 """Resolve the fileset expression for this repo"""
542 return set()
542 return set()
543
543
544 def printfiles(self, ui, m, fm, fmt, subrepos):
544 def printfiles(self, ui, m, fm, fmt, subrepos):
545 """handle the files command for this subrepo"""
545 """handle the files command for this subrepo"""
546 return 1
546 return 1
547
547
548 def archive(self, archiver, prefix, match=None):
548 def archive(self, archiver, prefix, match=None):
549 if match is not None:
549 if match is not None:
550 files = [f for f in self.files() if match(f)]
550 files = [f for f in self.files() if match(f)]
551 else:
551 else:
552 files = self.files()
552 files = self.files()
553 total = len(files)
553 total = len(files)
554 relpath = subrelpath(self)
554 relpath = subrelpath(self)
555 self.ui.progress(_('archiving (%s)') % relpath, 0,
555 self.ui.progress(_('archiving (%s)') % relpath, 0,
556 unit=_('files'), total=total)
556 unit=_('files'), total=total)
557 for i, name in enumerate(files):
557 for i, name in enumerate(files):
558 flags = self.fileflags(name)
558 flags = self.fileflags(name)
559 mode = 'x' in flags and 0o755 or 0o644
559 mode = 'x' in flags and 0o755 or 0o644
560 symlink = 'l' in flags
560 symlink = 'l' in flags
561 archiver.addfile(prefix + self._path + '/' + name,
561 archiver.addfile(prefix + self._path + '/' + name,
562 mode, symlink, self.filedata(name))
562 mode, symlink, self.filedata(name))
563 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
563 self.ui.progress(_('archiving (%s)') % relpath, i + 1,
564 unit=_('files'), total=total)
564 unit=_('files'), total=total)
565 self.ui.progress(_('archiving (%s)') % relpath, None)
565 self.ui.progress(_('archiving (%s)') % relpath, None)
566 return total
566 return total
567
567
568 def walk(self, match):
568 def walk(self, match):
569 '''
569 '''
570 walk recursively through the directory tree, finding all files
570 walk recursively through the directory tree, finding all files
571 matched by the match function
571 matched by the match function
572 '''
572 '''
573 pass
573 pass
574
574
575 def forget(self, match, prefix):
575 def forget(self, match, prefix):
576 return ([], [])
576 return ([], [])
577
577
578 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
578 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
579 """remove the matched files from the subrepository and the filesystem,
579 """remove the matched files from the subrepository and the filesystem,
580 possibly by force and/or after the file has been removed from the
580 possibly by force and/or after the file has been removed from the
581 filesystem. Return 0 on success, 1 on any warning.
581 filesystem. Return 0 on success, 1 on any warning.
582 """
582 """
583 warnings.append(_("warning: removefiles not implemented (%s)")
583 warnings.append(_("warning: removefiles not implemented (%s)")
584 % self._path)
584 % self._path)
585 return 1
585 return 1
586
586
587 def revert(self, substate, *pats, **opts):
587 def revert(self, substate, *pats, **opts):
588 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
588 self.ui.warn('%s: reverting %s subrepos is unsupported\n' \
589 % (substate[0], substate[2]))
589 % (substate[0], substate[2]))
590 return []
590 return []
591
591
592 def shortid(self, revid):
592 def shortid(self, revid):
593 return revid
593 return revid
594
594
595 def verify(self):
595 def verify(self):
596 '''verify the integrity of the repository. Return 0 on success or
596 '''verify the integrity of the repository. Return 0 on success or
597 warning, 1 on any error.
597 warning, 1 on any error.
598 '''
598 '''
599 return 0
599 return 0
600
600
601 @propertycache
601 @propertycache
602 def wvfs(self):
602 def wvfs(self):
603 """return vfs to access the working directory of this subrepository
603 """return vfs to access the working directory of this subrepository
604 """
604 """
605 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
605 return scmutil.vfs(self._ctx.repo().wvfs.join(self._path))
606
606
607 @propertycache
607 @propertycache
608 def _relpath(self):
608 def _relpath(self):
609 """return path to this subrepository as seen from outermost repository
609 """return path to this subrepository as seen from outermost repository
610 """
610 """
611 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
611 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
612
612
613 class hgsubrepo(abstractsubrepo):
613 class hgsubrepo(abstractsubrepo):
614 def __init__(self, ctx, path, state):
614 def __init__(self, ctx, path, state, allowcreate):
615 super(hgsubrepo, self).__init__(ctx, path)
615 super(hgsubrepo, self).__init__(ctx, path)
616 self._state = state
616 self._state = state
617 r = ctx.repo()
617 r = ctx.repo()
618 root = r.wjoin(path)
618 root = r.wjoin(path)
619 create = not r.wvfs.exists('%s/.hg' % path)
619 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
620 self._repo = hg.repository(r.baseui, root, create=create)
620 self._repo = hg.repository(r.baseui, root, create=create)
621
621
622 # Propagate the parent's --hidden option
622 # Propagate the parent's --hidden option
623 if r is r.unfiltered():
623 if r is r.unfiltered():
624 self._repo = self._repo.unfiltered()
624 self._repo = self._repo.unfiltered()
625
625
626 self.ui = self._repo.ui
626 self.ui = self._repo.ui
627 for s, k in [('ui', 'commitsubrepos')]:
627 for s, k in [('ui', 'commitsubrepos')]:
628 v = r.ui.config(s, k)
628 v = r.ui.config(s, k)
629 if v:
629 if v:
630 self.ui.setconfig(s, k, v, 'subrepo')
630 self.ui.setconfig(s, k, v, 'subrepo')
631 # internal config: ui._usedassubrepo
631 # internal config: ui._usedassubrepo
632 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
632 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
633 self._initrepo(r, state[0], create)
633 self._initrepo(r, state[0], create)
634
634
635 def storeclean(self, path):
635 def storeclean(self, path):
636 with self._repo.lock():
636 with self._repo.lock():
637 return self._storeclean(path)
637 return self._storeclean(path)
638
638
639 def _storeclean(self, path):
639 def _storeclean(self, path):
640 clean = True
640 clean = True
641 itercache = self._calcstorehash(path)
641 itercache = self._calcstorehash(path)
642 for filehash in self._readstorehashcache(path):
642 for filehash in self._readstorehashcache(path):
643 if filehash != next(itercache, None):
643 if filehash != next(itercache, None):
644 clean = False
644 clean = False
645 break
645 break
646 if clean:
646 if clean:
647 # if not empty:
647 # if not empty:
648 # the cached and current pull states have a different size
648 # the cached and current pull states have a different size
649 clean = next(itercache, None) is None
649 clean = next(itercache, None) is None
650 return clean
650 return clean
651
651
652 def _calcstorehash(self, remotepath):
652 def _calcstorehash(self, remotepath):
653 '''calculate a unique "store hash"
653 '''calculate a unique "store hash"
654
654
655 This method is used to to detect when there are changes that may
655 This method is used to to detect when there are changes that may
656 require a push to a given remote path.'''
656 require a push to a given remote path.'''
657 # sort the files that will be hashed in increasing (likely) file size
657 # sort the files that will be hashed in increasing (likely) file size
658 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
658 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
659 yield '# %s\n' % _expandedabspath(remotepath)
659 yield '# %s\n' % _expandedabspath(remotepath)
660 vfs = self._repo.vfs
660 vfs = self._repo.vfs
661 for relname in filelist:
661 for relname in filelist:
662 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
662 filehash = util.sha1(vfs.tryread(relname)).hexdigest()
663 yield '%s = %s\n' % (relname, filehash)
663 yield '%s = %s\n' % (relname, filehash)
664
664
665 @propertycache
665 @propertycache
666 def _cachestorehashvfs(self):
666 def _cachestorehashvfs(self):
667 return scmutil.vfs(self._repo.join('cache/storehash'))
667 return scmutil.vfs(self._repo.join('cache/storehash'))
668
668
669 def _readstorehashcache(self, remotepath):
669 def _readstorehashcache(self, remotepath):
670 '''read the store hash cache for a given remote repository'''
670 '''read the store hash cache for a given remote repository'''
671 cachefile = _getstorehashcachename(remotepath)
671 cachefile = _getstorehashcachename(remotepath)
672 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
672 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
673
673
674 def _cachestorehash(self, remotepath):
674 def _cachestorehash(self, remotepath):
675 '''cache the current store hash
675 '''cache the current store hash
676
676
677 Each remote repo requires its own store hash cache, because a subrepo
677 Each remote repo requires its own store hash cache, because a subrepo
678 store may be "clean" versus a given remote repo, but not versus another
678 store may be "clean" versus a given remote repo, but not versus another
679 '''
679 '''
680 cachefile = _getstorehashcachename(remotepath)
680 cachefile = _getstorehashcachename(remotepath)
681 with self._repo.lock():
681 with self._repo.lock():
682 storehash = list(self._calcstorehash(remotepath))
682 storehash = list(self._calcstorehash(remotepath))
683 vfs = self._cachestorehashvfs
683 vfs = self._cachestorehashvfs
684 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
684 vfs.writelines(cachefile, storehash, mode='w', notindexed=True)
685
685
686 def _getctx(self):
686 def _getctx(self):
687 '''fetch the context for this subrepo revision, possibly a workingctx
687 '''fetch the context for this subrepo revision, possibly a workingctx
688 '''
688 '''
689 if self._ctx.rev() is None:
689 if self._ctx.rev() is None:
690 return self._repo[None] # workingctx if parent is workingctx
690 return self._repo[None] # workingctx if parent is workingctx
691 else:
691 else:
692 rev = self._state[1]
692 rev = self._state[1]
693 return self._repo[rev]
693 return self._repo[rev]
694
694
695 @annotatesubrepoerror
695 @annotatesubrepoerror
696 def _initrepo(self, parentrepo, source, create):
696 def _initrepo(self, parentrepo, source, create):
697 self._repo._subparent = parentrepo
697 self._repo._subparent = parentrepo
698 self._repo._subsource = source
698 self._repo._subsource = source
699
699
700 if create:
700 if create:
701 lines = ['[paths]\n']
701 lines = ['[paths]\n']
702
702
703 def addpathconfig(key, value):
703 def addpathconfig(key, value):
704 if value:
704 if value:
705 lines.append('%s = %s\n' % (key, value))
705 lines.append('%s = %s\n' % (key, value))
706 self.ui.setconfig('paths', key, value, 'subrepo')
706 self.ui.setconfig('paths', key, value, 'subrepo')
707
707
708 defpath = _abssource(self._repo, abort=False)
708 defpath = _abssource(self._repo, abort=False)
709 defpushpath = _abssource(self._repo, True, abort=False)
709 defpushpath = _abssource(self._repo, True, abort=False)
710 addpathconfig('default', defpath)
710 addpathconfig('default', defpath)
711 if defpath != defpushpath:
711 if defpath != defpushpath:
712 addpathconfig('default-push', defpushpath)
712 addpathconfig('default-push', defpushpath)
713
713
714 fp = self._repo.vfs("hgrc", "w", text=True)
714 fp = self._repo.vfs("hgrc", "w", text=True)
715 try:
715 try:
716 fp.write(''.join(lines))
716 fp.write(''.join(lines))
717 finally:
717 finally:
718 fp.close()
718 fp.close()
719
719
720 @annotatesubrepoerror
720 @annotatesubrepoerror
721 def add(self, ui, match, prefix, explicitonly, **opts):
721 def add(self, ui, match, prefix, explicitonly, **opts):
722 return cmdutil.add(ui, self._repo, match,
722 return cmdutil.add(ui, self._repo, match,
723 self.wvfs.reljoin(prefix, self._path),
723 self.wvfs.reljoin(prefix, self._path),
724 explicitonly, **opts)
724 explicitonly, **opts)
725
725
726 @annotatesubrepoerror
726 @annotatesubrepoerror
727 def addremove(self, m, prefix, opts, dry_run, similarity):
727 def addremove(self, m, prefix, opts, dry_run, similarity):
728 # In the same way as sub directories are processed, once in a subrepo,
728 # In the same way as sub directories are processed, once in a subrepo,
729 # always entry any of its subrepos. Don't corrupt the options that will
729 # always entry any of its subrepos. Don't corrupt the options that will
730 # be used to process sibling subrepos however.
730 # be used to process sibling subrepos however.
731 opts = copy.copy(opts)
731 opts = copy.copy(opts)
732 opts['subrepos'] = True
732 opts['subrepos'] = True
733 return scmutil.addremove(self._repo, m,
733 return scmutil.addremove(self._repo, m,
734 self.wvfs.reljoin(prefix, self._path), opts,
734 self.wvfs.reljoin(prefix, self._path), opts,
735 dry_run, similarity)
735 dry_run, similarity)
736
736
737 @annotatesubrepoerror
737 @annotatesubrepoerror
738 def cat(self, match, prefix, **opts):
738 def cat(self, match, prefix, **opts):
739 rev = self._state[1]
739 rev = self._state[1]
740 ctx = self._repo[rev]
740 ctx = self._repo[rev]
741 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
741 return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts)
742
742
743 @annotatesubrepoerror
743 @annotatesubrepoerror
744 def status(self, rev2, **opts):
744 def status(self, rev2, **opts):
745 try:
745 try:
746 rev1 = self._state[1]
746 rev1 = self._state[1]
747 ctx1 = self._repo[rev1]
747 ctx1 = self._repo[rev1]
748 ctx2 = self._repo[rev2]
748 ctx2 = self._repo[rev2]
749 return self._repo.status(ctx1, ctx2, **opts)
749 return self._repo.status(ctx1, ctx2, **opts)
750 except error.RepoLookupError as inst:
750 except error.RepoLookupError as inst:
751 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
751 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
752 % (inst, subrelpath(self)))
752 % (inst, subrelpath(self)))
753 return scmutil.status([], [], [], [], [], [], [])
753 return scmutil.status([], [], [], [], [], [], [])
754
754
755 @annotatesubrepoerror
755 @annotatesubrepoerror
756 def diff(self, ui, diffopts, node2, match, prefix, **opts):
756 def diff(self, ui, diffopts, node2, match, prefix, **opts):
757 try:
757 try:
758 node1 = node.bin(self._state[1])
758 node1 = node.bin(self._state[1])
759 # We currently expect node2 to come from substate and be
759 # We currently expect node2 to come from substate and be
760 # in hex format
760 # in hex format
761 if node2 is not None:
761 if node2 is not None:
762 node2 = node.bin(node2)
762 node2 = node.bin(node2)
763 cmdutil.diffordiffstat(ui, self._repo, diffopts,
763 cmdutil.diffordiffstat(ui, self._repo, diffopts,
764 node1, node2, match,
764 node1, node2, match,
765 prefix=posixpath.join(prefix, self._path),
765 prefix=posixpath.join(prefix, self._path),
766 listsubrepos=True, **opts)
766 listsubrepos=True, **opts)
767 except error.RepoLookupError as inst:
767 except error.RepoLookupError as inst:
768 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
768 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
769 % (inst, subrelpath(self)))
769 % (inst, subrelpath(self)))
770
770
771 @annotatesubrepoerror
771 @annotatesubrepoerror
772 def archive(self, archiver, prefix, match=None):
772 def archive(self, archiver, prefix, match=None):
773 self._get(self._state + ('hg',))
773 self._get(self._state + ('hg',))
774 total = abstractsubrepo.archive(self, archiver, prefix, match)
774 total = abstractsubrepo.archive(self, archiver, prefix, match)
775 rev = self._state[1]
775 rev = self._state[1]
776 ctx = self._repo[rev]
776 ctx = self._repo[rev]
777 for subpath in ctx.substate:
777 for subpath in ctx.substate:
778 s = subrepo(ctx, subpath, True)
778 s = subrepo(ctx, subpath, True)
779 submatch = matchmod.subdirmatcher(subpath, match)
779 submatch = matchmod.subdirmatcher(subpath, match)
780 total += s.archive(archiver, prefix + self._path + '/', submatch)
780 total += s.archive(archiver, prefix + self._path + '/', submatch)
781 return total
781 return total
782
782
783 @annotatesubrepoerror
783 @annotatesubrepoerror
784 def dirty(self, ignoreupdate=False):
784 def dirty(self, ignoreupdate=False):
785 r = self._state[1]
785 r = self._state[1]
786 if r == '' and not ignoreupdate: # no state recorded
786 if r == '' and not ignoreupdate: # no state recorded
787 return True
787 return True
788 w = self._repo[None]
788 w = self._repo[None]
789 if r != w.p1().hex() and not ignoreupdate:
789 if r != w.p1().hex() and not ignoreupdate:
790 # different version checked out
790 # different version checked out
791 return True
791 return True
792 return w.dirty() # working directory changed
792 return w.dirty() # working directory changed
793
793
794 def basestate(self):
794 def basestate(self):
795 return self._repo['.'].hex()
795 return self._repo['.'].hex()
796
796
797 def checknested(self, path):
797 def checknested(self, path):
798 return self._repo._checknested(self._repo.wjoin(path))
798 return self._repo._checknested(self._repo.wjoin(path))
799
799
800 @annotatesubrepoerror
800 @annotatesubrepoerror
801 def commit(self, text, user, date):
801 def commit(self, text, user, date):
802 # don't bother committing in the subrepo if it's only been
802 # don't bother committing in the subrepo if it's only been
803 # updated
803 # updated
804 if not self.dirty(True):
804 if not self.dirty(True):
805 return self._repo['.'].hex()
805 return self._repo['.'].hex()
806 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
806 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
807 n = self._repo.commit(text, user, date)
807 n = self._repo.commit(text, user, date)
808 if not n:
808 if not n:
809 return self._repo['.'].hex() # different version checked out
809 return self._repo['.'].hex() # different version checked out
810 return node.hex(n)
810 return node.hex(n)
811
811
812 @annotatesubrepoerror
812 @annotatesubrepoerror
813 def phase(self, state):
813 def phase(self, state):
814 return self._repo[state].phase()
814 return self._repo[state].phase()
815
815
816 @annotatesubrepoerror
816 @annotatesubrepoerror
817 def remove(self):
817 def remove(self):
818 # we can't fully delete the repository as it may contain
818 # we can't fully delete the repository as it may contain
819 # local-only history
819 # local-only history
820 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
820 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
821 hg.clean(self._repo, node.nullid, False)
821 hg.clean(self._repo, node.nullid, False)
822
822
823 def _get(self, state):
823 def _get(self, state):
824 source, revision, kind = state
824 source, revision, kind = state
825 if revision in self._repo.unfiltered():
825 if revision in self._repo.unfiltered():
826 return True
826 return True
827 self._repo._subsource = source
827 self._repo._subsource = source
828 srcurl = _abssource(self._repo)
828 srcurl = _abssource(self._repo)
829 other = hg.peer(self._repo, {}, srcurl)
829 other = hg.peer(self._repo, {}, srcurl)
830 if len(self._repo) == 0:
830 if len(self._repo) == 0:
831 self.ui.status(_('cloning subrepo %s from %s\n')
831 self.ui.status(_('cloning subrepo %s from %s\n')
832 % (subrelpath(self), srcurl))
832 % (subrelpath(self), srcurl))
833 parentrepo = self._repo._subparent
833 parentrepo = self._repo._subparent
834 # use self._repo.vfs instead of self.wvfs to remove .hg only
834 # use self._repo.vfs instead of self.wvfs to remove .hg only
835 self._repo.vfs.rmtree()
835 self._repo.vfs.rmtree()
836 other, cloned = hg.clone(self._repo._subparent.baseui, {},
836 other, cloned = hg.clone(self._repo._subparent.baseui, {},
837 other, self._repo.root,
837 other, self._repo.root,
838 update=False)
838 update=False)
839 self._repo = cloned.local()
839 self._repo = cloned.local()
840 self._initrepo(parentrepo, source, create=True)
840 self._initrepo(parentrepo, source, create=True)
841 self._cachestorehash(srcurl)
841 self._cachestorehash(srcurl)
842 else:
842 else:
843 self.ui.status(_('pulling subrepo %s from %s\n')
843 self.ui.status(_('pulling subrepo %s from %s\n')
844 % (subrelpath(self), srcurl))
844 % (subrelpath(self), srcurl))
845 cleansub = self.storeclean(srcurl)
845 cleansub = self.storeclean(srcurl)
846 exchange.pull(self._repo, other)
846 exchange.pull(self._repo, other)
847 if cleansub:
847 if cleansub:
848 # keep the repo clean after pull
848 # keep the repo clean after pull
849 self._cachestorehash(srcurl)
849 self._cachestorehash(srcurl)
850 return False
850 return False
851
851
852 @annotatesubrepoerror
852 @annotatesubrepoerror
853 def get(self, state, overwrite=False):
853 def get(self, state, overwrite=False):
854 inrepo = self._get(state)
854 inrepo = self._get(state)
855 source, revision, kind = state
855 source, revision, kind = state
856 repo = self._repo
856 repo = self._repo
857 repo.ui.debug("getting subrepo %s\n" % self._path)
857 repo.ui.debug("getting subrepo %s\n" % self._path)
858 if inrepo:
858 if inrepo:
859 urepo = repo.unfiltered()
859 urepo = repo.unfiltered()
860 ctx = urepo[revision]
860 ctx = urepo[revision]
861 if ctx.hidden():
861 if ctx.hidden():
862 urepo.ui.warn(
862 urepo.ui.warn(
863 _('revision %s in subrepo %s is hidden\n') \
863 _('revision %s in subrepo %s is hidden\n') \
864 % (revision[0:12], self._path))
864 % (revision[0:12], self._path))
865 repo = urepo
865 repo = urepo
866 hg.updaterepo(repo, revision, overwrite)
866 hg.updaterepo(repo, revision, overwrite)
867
867
868 @annotatesubrepoerror
868 @annotatesubrepoerror
869 def merge(self, state):
869 def merge(self, state):
870 self._get(state)
870 self._get(state)
871 cur = self._repo['.']
871 cur = self._repo['.']
872 dst = self._repo[state[1]]
872 dst = self._repo[state[1]]
873 anc = dst.ancestor(cur)
873 anc = dst.ancestor(cur)
874
874
875 def mergefunc():
875 def mergefunc():
876 if anc == cur and dst.branch() == cur.branch():
876 if anc == cur and dst.branch() == cur.branch():
877 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
877 self.ui.debug("updating subrepo %s\n" % subrelpath(self))
878 hg.update(self._repo, state[1])
878 hg.update(self._repo, state[1])
879 elif anc == dst:
879 elif anc == dst:
880 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
880 self.ui.debug("skipping subrepo %s\n" % subrelpath(self))
881 else:
881 else:
882 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
882 self.ui.debug("merging subrepo %s\n" % subrelpath(self))
883 hg.merge(self._repo, state[1], remind=False)
883 hg.merge(self._repo, state[1], remind=False)
884
884
885 wctx = self._repo[None]
885 wctx = self._repo[None]
886 if self.dirty():
886 if self.dirty():
887 if anc != dst:
887 if anc != dst:
888 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
888 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
889 mergefunc()
889 mergefunc()
890 else:
890 else:
891 mergefunc()
891 mergefunc()
892 else:
892 else:
893 mergefunc()
893 mergefunc()
894
894
895 @annotatesubrepoerror
895 @annotatesubrepoerror
896 def push(self, opts):
896 def push(self, opts):
897 force = opts.get('force')
897 force = opts.get('force')
898 newbranch = opts.get('new_branch')
898 newbranch = opts.get('new_branch')
899 ssh = opts.get('ssh')
899 ssh = opts.get('ssh')
900
900
901 # push subrepos depth-first for coherent ordering
901 # push subrepos depth-first for coherent ordering
902 c = self._repo['']
902 c = self._repo['']
903 subs = c.substate # only repos that are committed
903 subs = c.substate # only repos that are committed
904 for s in sorted(subs):
904 for s in sorted(subs):
905 if c.sub(s).push(opts) == 0:
905 if c.sub(s).push(opts) == 0:
906 return False
906 return False
907
907
908 dsturl = _abssource(self._repo, True)
908 dsturl = _abssource(self._repo, True)
909 if not force:
909 if not force:
910 if self.storeclean(dsturl):
910 if self.storeclean(dsturl):
911 self.ui.status(
911 self.ui.status(
912 _('no changes made to subrepo %s since last push to %s\n')
912 _('no changes made to subrepo %s since last push to %s\n')
913 % (subrelpath(self), dsturl))
913 % (subrelpath(self), dsturl))
914 return None
914 return None
915 self.ui.status(_('pushing subrepo %s to %s\n') %
915 self.ui.status(_('pushing subrepo %s to %s\n') %
916 (subrelpath(self), dsturl))
916 (subrelpath(self), dsturl))
917 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
917 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
918 res = exchange.push(self._repo, other, force, newbranch=newbranch)
918 res = exchange.push(self._repo, other, force, newbranch=newbranch)
919
919
920 # the repo is now clean
920 # the repo is now clean
921 self._cachestorehash(dsturl)
921 self._cachestorehash(dsturl)
922 return res.cgresult
922 return res.cgresult
923
923
924 @annotatesubrepoerror
924 @annotatesubrepoerror
925 def outgoing(self, ui, dest, opts):
925 def outgoing(self, ui, dest, opts):
926 if 'rev' in opts or 'branch' in opts:
926 if 'rev' in opts or 'branch' in opts:
927 opts = copy.copy(opts)
927 opts = copy.copy(opts)
928 opts.pop('rev', None)
928 opts.pop('rev', None)
929 opts.pop('branch', None)
929 opts.pop('branch', None)
930 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
930 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
931
931
932 @annotatesubrepoerror
932 @annotatesubrepoerror
933 def incoming(self, ui, source, opts):
933 def incoming(self, ui, source, opts):
934 if 'rev' in opts or 'branch' in opts:
934 if 'rev' in opts or 'branch' in opts:
935 opts = copy.copy(opts)
935 opts = copy.copy(opts)
936 opts.pop('rev', None)
936 opts.pop('rev', None)
937 opts.pop('branch', None)
937 opts.pop('branch', None)
938 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
938 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
939
939
940 @annotatesubrepoerror
940 @annotatesubrepoerror
941 def files(self):
941 def files(self):
942 rev = self._state[1]
942 rev = self._state[1]
943 ctx = self._repo[rev]
943 ctx = self._repo[rev]
944 return ctx.manifest().keys()
944 return ctx.manifest().keys()
945
945
946 def filedata(self, name):
946 def filedata(self, name):
947 rev = self._state[1]
947 rev = self._state[1]
948 return self._repo[rev][name].data()
948 return self._repo[rev][name].data()
949
949
950 def fileflags(self, name):
950 def fileflags(self, name):
951 rev = self._state[1]
951 rev = self._state[1]
952 ctx = self._repo[rev]
952 ctx = self._repo[rev]
953 return ctx.flags(name)
953 return ctx.flags(name)
954
954
955 @annotatesubrepoerror
955 @annotatesubrepoerror
956 def printfiles(self, ui, m, fm, fmt, subrepos):
956 def printfiles(self, ui, m, fm, fmt, subrepos):
957 # If the parent context is a workingctx, use the workingctx here for
957 # If the parent context is a workingctx, use the workingctx here for
958 # consistency.
958 # consistency.
959 if self._ctx.rev() is None:
959 if self._ctx.rev() is None:
960 ctx = self._repo[None]
960 ctx = self._repo[None]
961 else:
961 else:
962 rev = self._state[1]
962 rev = self._state[1]
963 ctx = self._repo[rev]
963 ctx = self._repo[rev]
964 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
964 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
965
965
966 @annotatesubrepoerror
966 @annotatesubrepoerror
967 def getfileset(self, expr):
967 def getfileset(self, expr):
968 if self._ctx.rev() is None:
968 if self._ctx.rev() is None:
969 ctx = self._repo[None]
969 ctx = self._repo[None]
970 else:
970 else:
971 rev = self._state[1]
971 rev = self._state[1]
972 ctx = self._repo[rev]
972 ctx = self._repo[rev]
973
973
974 files = ctx.getfileset(expr)
974 files = ctx.getfileset(expr)
975
975
976 for subpath in ctx.substate:
976 for subpath in ctx.substate:
977 sub = ctx.sub(subpath)
977 sub = ctx.sub(subpath)
978
978
979 try:
979 try:
980 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
980 files.extend(subpath + '/' + f for f in sub.getfileset(expr))
981 except error.LookupError:
981 except error.LookupError:
982 self.ui.status(_("skipping missing subrepository: %s\n")
982 self.ui.status(_("skipping missing subrepository: %s\n")
983 % self.wvfs.reljoin(reporelpath(self), subpath))
983 % self.wvfs.reljoin(reporelpath(self), subpath))
984 return files
984 return files
985
985
986 def walk(self, match):
986 def walk(self, match):
987 ctx = self._repo[None]
987 ctx = self._repo[None]
988 return ctx.walk(match)
988 return ctx.walk(match)
989
989
990 @annotatesubrepoerror
990 @annotatesubrepoerror
991 def forget(self, match, prefix):
991 def forget(self, match, prefix):
992 return cmdutil.forget(self.ui, self._repo, match,
992 return cmdutil.forget(self.ui, self._repo, match,
993 self.wvfs.reljoin(prefix, self._path), True)
993 self.wvfs.reljoin(prefix, self._path), True)
994
994
995 @annotatesubrepoerror
995 @annotatesubrepoerror
996 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
996 def removefiles(self, matcher, prefix, after, force, subrepos, warnings):
997 return cmdutil.remove(self.ui, self._repo, matcher,
997 return cmdutil.remove(self.ui, self._repo, matcher,
998 self.wvfs.reljoin(prefix, self._path),
998 self.wvfs.reljoin(prefix, self._path),
999 after, force, subrepos)
999 after, force, subrepos)
1000
1000
1001 @annotatesubrepoerror
1001 @annotatesubrepoerror
1002 def revert(self, substate, *pats, **opts):
1002 def revert(self, substate, *pats, **opts):
1003 # reverting a subrepo is a 2 step process:
1003 # reverting a subrepo is a 2 step process:
1004 # 1. if the no_backup is not set, revert all modified
1004 # 1. if the no_backup is not set, revert all modified
1005 # files inside the subrepo
1005 # files inside the subrepo
1006 # 2. update the subrepo to the revision specified in
1006 # 2. update the subrepo to the revision specified in
1007 # the corresponding substate dictionary
1007 # the corresponding substate dictionary
1008 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1008 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1009 if not opts.get('no_backup'):
1009 if not opts.get('no_backup'):
1010 # Revert all files on the subrepo, creating backups
1010 # Revert all files on the subrepo, creating backups
1011 # Note that this will not recursively revert subrepos
1011 # Note that this will not recursively revert subrepos
1012 # We could do it if there was a set:subrepos() predicate
1012 # We could do it if there was a set:subrepos() predicate
1013 opts = opts.copy()
1013 opts = opts.copy()
1014 opts['date'] = None
1014 opts['date'] = None
1015 opts['rev'] = substate[1]
1015 opts['rev'] = substate[1]
1016
1016
1017 self.filerevert(*pats, **opts)
1017 self.filerevert(*pats, **opts)
1018
1018
1019 # Update the repo to the revision specified in the given substate
1019 # Update the repo to the revision specified in the given substate
1020 if not opts.get('dry_run'):
1020 if not opts.get('dry_run'):
1021 self.get(substate, overwrite=True)
1021 self.get(substate, overwrite=True)
1022
1022
1023 def filerevert(self, *pats, **opts):
1023 def filerevert(self, *pats, **opts):
1024 ctx = self._repo[opts['rev']]
1024 ctx = self._repo[opts['rev']]
1025 parents = self._repo.dirstate.parents()
1025 parents = self._repo.dirstate.parents()
1026 if opts.get('all'):
1026 if opts.get('all'):
1027 pats = ['set:modified()']
1027 pats = ['set:modified()']
1028 else:
1028 else:
1029 pats = []
1029 pats = []
1030 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1030 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
1031
1031
1032 def shortid(self, revid):
1032 def shortid(self, revid):
1033 return revid[:12]
1033 return revid[:12]
1034
1034
1035 def verify(self):
1035 def verify(self):
1036 try:
1036 try:
1037 rev = self._state[1]
1037 rev = self._state[1]
1038 ctx = self._repo.unfiltered()[rev]
1038 ctx = self._repo.unfiltered()[rev]
1039 if ctx.hidden():
1039 if ctx.hidden():
1040 # Since hidden revisions aren't pushed/pulled, it seems worth an
1040 # Since hidden revisions aren't pushed/pulled, it seems worth an
1041 # explicit warning.
1041 # explicit warning.
1042 ui = self._repo.ui
1042 ui = self._repo.ui
1043 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1043 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
1044 (self._relpath, node.short(self._ctx.node())))
1044 (self._relpath, node.short(self._ctx.node())))
1045 return 0
1045 return 0
1046 except error.RepoLookupError:
1046 except error.RepoLookupError:
1047 # A missing subrepo revision may be a case of needing to pull it, so
1047 # A missing subrepo revision may be a case of needing to pull it, so
1048 # don't treat this as an error.
1048 # don't treat this as an error.
1049 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1049 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
1050 (self._relpath, node.short(self._ctx.node())))
1050 (self._relpath, node.short(self._ctx.node())))
1051 return 0
1051 return 0
1052
1052
1053 @propertycache
1053 @propertycache
1054 def wvfs(self):
1054 def wvfs(self):
1055 """return own wvfs for efficiency and consistency
1055 """return own wvfs for efficiency and consistency
1056 """
1056 """
1057 return self._repo.wvfs
1057 return self._repo.wvfs
1058
1058
1059 @propertycache
1059 @propertycache
1060 def _relpath(self):
1060 def _relpath(self):
1061 """return path to this subrepository as seen from outermost repository
1061 """return path to this subrepository as seen from outermost repository
1062 """
1062 """
1063 # Keep consistent dir separators by avoiding vfs.join(self._path)
1063 # Keep consistent dir separators by avoiding vfs.join(self._path)
1064 return reporelpath(self._repo)
1064 return reporelpath(self._repo)
1065
1065
class svnsubrepo(abstractsubrepo):
    """Subrepository backed by a Subversion working copy.

    All operations shell out to the 'svn' executable; its absence aborts
    at construction time.
    """

    def __init__(self, ctx, path, state, allowcreate):
        # 'allowcreate' is part of the subrepo constructor API
        # (issue5128); svn subrepos do not lazily create anything here.
        super(svnsubrepo, self).__init__(ctx, path)
        self._state = state
        self._exe = util.findexe('svn')
        if not self._exe:
            raise error.Abort(_("'svn' executable not found for subrepo '%s'")
                              % self._path)

    def _svncommand(self, commands, filename='', failok=False):
        """Run an svn command and return (stdout, stderr).

        'filename' of None suppresses the implicit path argument; with
        failok=False a nonzero exit aborts and stderr is surfaced.
        """
        cmd = [self._exe]
        extrakw = {}
        if not self.ui.interactive():
            # Making stdin be a pipe should prevent svn from behaving
            # interactively even if we can't pass --non-interactive.
            extrakw['stdin'] = subprocess.PIPE
            # Starting in svn 1.5 --non-interactive is a global flag
            # instead of being per-command, but we need to support 1.4 so
            # we have to be intelligent about what commands take
            # --non-interactive.
            if commands[0] in ('update', 'checkout', 'commit'):
                cmd.append('--non-interactive')
        cmd.extend(commands)
        if filename is not None:
            path = self.wvfs.reljoin(self._ctx.repo().origroot,
                                     self._path, filename)
            cmd.append(path)
        env = dict(os.environ)
        # Avoid localized output, preserve current locale for everything else.
        lc_all = env.get('LC_ALL')
        if lc_all:
            env['LANG'] = lc_all
            del env['LC_ALL']
        env['LC_MESSAGES'] = 'C'
        p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                             universal_newlines=True, env=env, **extrakw)
        stdout, stderr = p.communicate()
        stderr = stderr.strip()
        if not failok:
            if p.returncode:
                raise error.Abort(stderr or 'exited with code %d'
                                  % p.returncode)
            if stderr:
                self.ui.warn(stderr + '\n')
        return stdout, stderr

    @propertycache
    def _svnversion(self):
        # (major, minor) of the svn client, parsed from '--version --quiet'
        output, err = self._svncommand(['--version', '--quiet'], filename=None)
        m = re.search(r'^(\d+)\.(\d+)', output)
        if not m:
            raise error.Abort(_('cannot retrieve svn tool version'))
        return (int(m.group(1)), int(m.group(2)))

    def _wcrevs(self):
        # Get the working directory revision as well as the last
        # commit revision so we can compare the subrepo state with
        # both. We used to store the working directory one.
        output, err = self._svncommand(['info', '--xml'])
        doc = xml.dom.minidom.parseString(output)
        entries = doc.getElementsByTagName('entry')
        lastrev, rev = '0', '0'
        if entries:
            rev = str(entries[0].getAttribute('revision')) or '0'
            commits = entries[0].getElementsByTagName('commit')
            if commits:
                lastrev = str(commits[0].getAttribute('revision')) or '0'
        return (lastrev, rev)

    def _wcrev(self):
        # last committed revision of the working copy
        return self._wcrevs()[0]

    def _wcchanged(self):
        """Return (changes, extchanges, missing) where changes is True
        if the working directory was changed, extchanges is
        True if any of these changes concern an external entry and missing
        is True if any change is a missing entry.
        """
        output, err = self._svncommand(['status', '--xml'])
        externals, changes, missing = [], [], []
        doc = xml.dom.minidom.parseString(output)
        for e in doc.getElementsByTagName('entry'):
            s = e.getElementsByTagName('wc-status')
            if not s:
                continue
            item = s[0].getAttribute('item')
            props = s[0].getAttribute('props')
            path = e.getAttribute('path')
            if item == 'external':
                externals.append(path)
            elif item == 'missing':
                missing.append(path)
            if (item not in ('', 'normal', 'unversioned', 'external')
                or props not in ('', 'none', 'normal')):
                changes.append(path)
        for path in changes:
            for ext in externals:
                if path == ext or path.startswith(ext + os.sep):
                    return True, True, bool(missing)
        return bool(changes), False, bool(missing)

    def dirty(self, ignoreupdate=False):
        # dirty when the working copy changed, or (unless ignoreupdate)
        # when it sits on a revision other than the recorded state
        if not self._wcchanged()[0]:
            if self._state[1] in self._wcrevs() or ignoreupdate:
                return False
        return True

    def basestate(self):
        lastrev, rev = self._wcrevs()
        if lastrev != rev:
            # Last committed rev is not the same than rev. We would
            # like to take lastrev but we do not know if the subrepo
            # URL exists at lastrev. Test it and fallback to rev it
            # is not there.
            try:
                self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
                return lastrev
            except error.Abort:
                pass
        return rev

    @annotatesubrepoerror
    def commit(self, text, user, date):
        # user and date are out of our hands since svn is centralized
        changed, extchanged, missing = self._wcchanged()
        if not changed:
            return self.basestate()
        if extchanged:
            # Do not try to commit externals
            raise error.Abort(_('cannot commit svn externals'))
        if missing:
            # svn can commit with missing entries but aborting like hg
            # seems a better approach.
            raise error.Abort(_('cannot commit missing svn entries'))
        commitinfo, err = self._svncommand(['commit', '-m', text])
        self.ui.status(commitinfo)
        newrev = re.search('Committed revision ([0-9]+).', commitinfo)
        if not newrev:
            if not commitinfo.strip():
                # Sometimes, our definition of "changed" differs from
                # svn one. For instance, svn ignores missing files
                # when committing. If there are only missing files, no
                # commit is made, no output and no error code.
                raise error.Abort(_('failed to commit svn changes'))
            raise error.Abort(commitinfo.splitlines()[-1])
        newrev = newrev.groups()[0]
        self.ui.status(self._svncommand(['update', '-r', newrev])[0])
        return newrev

    @annotatesubrepoerror
    def remove(self):
        """Delete the working copy, refusing if it has local changes."""
        if self.dirty():
            self.ui.warn(_('not removing repo %s because '
                           'it has changes.\n') % self._path)
            return
        self.ui.note(_('removing subrepo %s\n') % self._path)

        self.wvfs.rmtree(forcibly=True)
        try:
            # prune now-empty parent directories; best effort only
            pwvfs = self._ctx.repo().wvfs
            pwvfs.removedirs(pwvfs.dirname(self._path))
        except OSError:
            pass

    @annotatesubrepoerror
    def get(self, state, overwrite=False):
        """Check out the revision recorded in 'state'.

        With overwrite=True any local modifications are reverted first.
        """
        if overwrite:
            self._svncommand(['revert', '--recursive'])
        args = ['checkout']
        if self._svnversion >= (1, 5):
            args.append('--force')
        # The revision must be specified at the end of the URL to properly
        # update to a directory which has since been deleted and recreated.
        args.append('%s@%s' % (state[0], state[1]))
        status, err = self._svncommand(args, failok=True)
        _sanitize(self.ui, self.wvfs, '.svn')
        if not re.search('Checked out revision [0-9]+.', status):
            if ('is already a working copy for a different URL' in err
                and (self._wcchanged()[:2] == (False, False))):
                # obstructed but clean working copy, so just blow it away.
                self.remove()
                self.get(state, overwrite=False)
                return
            raise error.Abort((status or err).splitlines()[-1])
        self.ui.status(status)

    @annotatesubrepoerror
    def merge(self, state):
        old = self._state[1]
        new = state[1]
        wcrev = self._wcrev()
        if new != wcrev:
            dirty = old == wcrev or self._wcchanged()[0]
            if _updateprompt(self.ui, self, dirty, wcrev, new):
                self.get(state, False)

    def push(self, opts):
        # push is a no-op for SVN
        return True

    @annotatesubrepoerror
    def files(self):
        """Return tracked file names as utf-8 encoded paths."""
        output = self._svncommand(['list', '--recursive', '--xml'])[0]
        doc = xml.dom.minidom.parseString(output)
        paths = []
        for e in doc.getElementsByTagName('entry'):
            kind = str(e.getAttribute('kind'))
            if kind != 'file':
                continue
            name = ''.join(c.data for c
                           in e.getElementsByTagName('name')[0].childNodes
                           if c.nodeType == c.TEXT_NODE)
            paths.append(name.encode('utf-8'))
        return paths

    def filedata(self, name):
        # file content via 'svn cat'
        return self._svncommand(['cat'], name)[0]
1284
1284
1285
1285
class gitsubrepo(abstractsubrepo):
    """Subrepository backed by a git checkout, driven via the git CLI."""

    def __init__(self, ctx, path, state, allowcreate):
        # 'allowcreate' is part of the subrepo constructor API
        # (issue5128); the git backend does not use it here.
        super(gitsubrepo, self).__init__(ctx, path)
        self._state = state
        self._abspath = ctx.repo().wjoin(path)
        self._subparent = ctx.repo()
        self._ensuregit()
1293
1293
1294 def _ensuregit(self):
1294 def _ensuregit(self):
1295 try:
1295 try:
1296 self._gitexecutable = 'git'
1296 self._gitexecutable = 'git'
1297 out, err = self._gitnodir(['--version'])
1297 out, err = self._gitnodir(['--version'])
1298 except OSError as e:
1298 except OSError as e:
1299 genericerror = _("error executing git for subrepo '%s': %s")
1299 genericerror = _("error executing git for subrepo '%s': %s")
1300 notfoundhint = _("check git is installed and in your PATH")
1300 notfoundhint = _("check git is installed and in your PATH")
1301 if e.errno != errno.ENOENT:
1301 if e.errno != errno.ENOENT:
1302 raise error.Abort(genericerror % (self._path, e.strerror))
1302 raise error.Abort(genericerror % (self._path, e.strerror))
1303 elif os.name == 'nt':
1303 elif os.name == 'nt':
1304 try:
1304 try:
1305 self._gitexecutable = 'git.cmd'
1305 self._gitexecutable = 'git.cmd'
1306 out, err = self._gitnodir(['--version'])
1306 out, err = self._gitnodir(['--version'])
1307 except OSError as e2:
1307 except OSError as e2:
1308 if e2.errno == errno.ENOENT:
1308 if e2.errno == errno.ENOENT:
1309 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1309 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1310 " for subrepo '%s'") % self._path,
1310 " for subrepo '%s'") % self._path,
1311 hint=notfoundhint)
1311 hint=notfoundhint)
1312 else:
1312 else:
1313 raise error.Abort(genericerror % (self._path,
1313 raise error.Abort(genericerror % (self._path,
1314 e2.strerror))
1314 e2.strerror))
1315 else:
1315 else:
1316 raise error.Abort(_("couldn't find git for subrepo '%s'")
1316 raise error.Abort(_("couldn't find git for subrepo '%s'")
1317 % self._path, hint=notfoundhint)
1317 % self._path, hint=notfoundhint)
1318 versionstatus = self._checkversion(out)
1318 versionstatus = self._checkversion(out)
1319 if versionstatus == 'unknown':
1319 if versionstatus == 'unknown':
1320 self.ui.warn(_('cannot retrieve git version\n'))
1320 self.ui.warn(_('cannot retrieve git version\n'))
1321 elif versionstatus == 'abort':
1321 elif versionstatus == 'abort':
1322 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1322 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1323 elif versionstatus == 'warning':
1323 elif versionstatus == 'warning':
1324 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1324 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1325
1325
1326 @staticmethod
1326 @staticmethod
1327 def _gitversion(out):
1327 def _gitversion(out):
1328 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1328 m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
1329 if m:
1329 if m:
1330 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1330 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1331
1331
1332 m = re.search(r'^git version (\d+)\.(\d+)', out)
1332 m = re.search(r'^git version (\d+)\.(\d+)', out)
1333 if m:
1333 if m:
1334 return (int(m.group(1)), int(m.group(2)), 0)
1334 return (int(m.group(1)), int(m.group(2)), 0)
1335
1335
1336 return -1
1336 return -1
1337
1337
1338 @staticmethod
1338 @staticmethod
1339 def _checkversion(out):
1339 def _checkversion(out):
1340 '''ensure git version is new enough
1340 '''ensure git version is new enough
1341
1341
1342 >>> _checkversion = gitsubrepo._checkversion
1342 >>> _checkversion = gitsubrepo._checkversion
1343 >>> _checkversion('git version 1.6.0')
1343 >>> _checkversion('git version 1.6.0')
1344 'ok'
1344 'ok'
1345 >>> _checkversion('git version 1.8.5')
1345 >>> _checkversion('git version 1.8.5')
1346 'ok'
1346 'ok'
1347 >>> _checkversion('git version 1.4.0')
1347 >>> _checkversion('git version 1.4.0')
1348 'abort'
1348 'abort'
1349 >>> _checkversion('git version 1.5.0')
1349 >>> _checkversion('git version 1.5.0')
1350 'warning'
1350 'warning'
1351 >>> _checkversion('git version 1.9-rc0')
1351 >>> _checkversion('git version 1.9-rc0')
1352 'ok'
1352 'ok'
1353 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1353 >>> _checkversion('git version 1.9.0.265.g81cdec2')
1354 'ok'
1354 'ok'
1355 >>> _checkversion('git version 1.9.0.GIT')
1355 >>> _checkversion('git version 1.9.0.GIT')
1356 'ok'
1356 'ok'
1357 >>> _checkversion('git version 12345')
1357 >>> _checkversion('git version 12345')
1358 'unknown'
1358 'unknown'
1359 >>> _checkversion('no')
1359 >>> _checkversion('no')
1360 'unknown'
1360 'unknown'
1361 '''
1361 '''
1362 version = gitsubrepo._gitversion(out)
1362 version = gitsubrepo._gitversion(out)
1363 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1363 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1364 # despite the docstring comment. For now, error on 1.4.0, warn on
1364 # despite the docstring comment. For now, error on 1.4.0, warn on
1365 # 1.5.0 but attempt to continue.
1365 # 1.5.0 but attempt to continue.
1366 if version == -1:
1366 if version == -1:
1367 return 'unknown'
1367 return 'unknown'
1368 if version < (1, 5, 0):
1368 if version < (1, 5, 0):
1369 return 'abort'
1369 return 'abort'
1370 elif version < (1, 6, 0):
1370 elif version < (1, 6, 0):
1371 return 'warning'
1371 return 'warning'
1372 return 'ok'
1372 return 'ok'
1373
1373
1374 def _gitcommand(self, commands, env=None, stream=False):
1374 def _gitcommand(self, commands, env=None, stream=False):
1375 return self._gitdir(commands, env=env, stream=stream)[0]
1375 return self._gitdir(commands, env=env, stream=stream)[0]
1376
1376
1377 def _gitdir(self, commands, env=None, stream=False):
1377 def _gitdir(self, commands, env=None, stream=False):
1378 return self._gitnodir(commands, env=env, stream=stream,
1378 return self._gitnodir(commands, env=env, stream=stream,
1379 cwd=self._abspath)
1379 cwd=self._abspath)
1380
1380
1381 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1381 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1382 """Calls the git command
1382 """Calls the git command
1383
1383
1384 The methods tries to call the git command. versions prior to 1.6.0
1384 The methods tries to call the git command. versions prior to 1.6.0
1385 are not supported and very probably fail.
1385 are not supported and very probably fail.
1386 """
1386 """
1387 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1387 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1388 if env is None:
1388 if env is None:
1389 env = os.environ.copy()
1389 env = os.environ.copy()
1390 # disable localization for Git output (issue5176)
1390 # disable localization for Git output (issue5176)
1391 env['LC_ALL'] = 'C'
1391 env['LC_ALL'] = 'C'
1392 # fix for Git CVE-2015-7545
1392 # fix for Git CVE-2015-7545
1393 if 'GIT_ALLOW_PROTOCOL' not in env:
1393 if 'GIT_ALLOW_PROTOCOL' not in env:
1394 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1394 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1395 # unless ui.quiet is set, print git's stderr,
1395 # unless ui.quiet is set, print git's stderr,
1396 # which is mostly progress and useful info
1396 # which is mostly progress and useful info
1397 errpipe = None
1397 errpipe = None
1398 if self.ui.quiet:
1398 if self.ui.quiet:
1399 errpipe = open(os.devnull, 'w')
1399 errpipe = open(os.devnull, 'w')
1400 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1400 p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
1401 cwd=cwd, env=env, close_fds=util.closefds,
1401 cwd=cwd, env=env, close_fds=util.closefds,
1402 stdout=subprocess.PIPE, stderr=errpipe)
1402 stdout=subprocess.PIPE, stderr=errpipe)
1403 if stream:
1403 if stream:
1404 return p.stdout, None
1404 return p.stdout, None
1405
1405
1406 retdata = p.stdout.read().strip()
1406 retdata = p.stdout.read().strip()
1407 # wait for the child to exit to avoid race condition.
1407 # wait for the child to exit to avoid race condition.
1408 p.wait()
1408 p.wait()
1409
1409
1410 if p.returncode != 0 and p.returncode != 1:
1410 if p.returncode != 0 and p.returncode != 1:
1411 # there are certain error codes that are ok
1411 # there are certain error codes that are ok
1412 command = commands[0]
1412 command = commands[0]
1413 if command in ('cat-file', 'symbolic-ref'):
1413 if command in ('cat-file', 'symbolic-ref'):
1414 return retdata, p.returncode
1414 return retdata, p.returncode
1415 # for all others, abort
1415 # for all others, abort
1416 raise error.Abort('git %s error %d in %s' %
1416 raise error.Abort('git %s error %d in %s' %
1417 (command, p.returncode, self._relpath))
1417 (command, p.returncode, self._relpath))
1418
1418
1419 return retdata, p.returncode
1419 return retdata, p.returncode
1420
1420
1421 def _gitmissing(self):
1421 def _gitmissing(self):
1422 return not self.wvfs.exists('.git')
1422 return not self.wvfs.exists('.git')
1423
1423
1424 def _gitstate(self):
1424 def _gitstate(self):
1425 return self._gitcommand(['rev-parse', 'HEAD'])
1425 return self._gitcommand(['rev-parse', 'HEAD'])
1426
1426
1427 def _gitcurrentbranch(self):
1427 def _gitcurrentbranch(self):
1428 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1428 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1429 if err:
1429 if err:
1430 current = None
1430 current = None
1431 return current
1431 return current
1432
1432
1433 def _gitremote(self, remote):
1433 def _gitremote(self, remote):
1434 out = self._gitcommand(['remote', 'show', '-n', remote])
1434 out = self._gitcommand(['remote', 'show', '-n', remote])
1435 line = out.split('\n')[1]
1435 line = out.split('\n')[1]
1436 i = line.index('URL: ') + len('URL: ')
1436 i = line.index('URL: ') + len('URL: ')
1437 return line[i:]
1437 return line[i:]
1438
1438
1439 def _githavelocally(self, revision):
1439 def _githavelocally(self, revision):
1440 out, code = self._gitdir(['cat-file', '-e', revision])
1440 out, code = self._gitdir(['cat-file', '-e', revision])
1441 return code == 0
1441 return code == 0
1442
1442
1443 def _gitisancestor(self, r1, r2):
1443 def _gitisancestor(self, r1, r2):
1444 base = self._gitcommand(['merge-base', r1, r2])
1444 base = self._gitcommand(['merge-base', r1, r2])
1445 return base == r1
1445 return base == r1
1446
1446
1447 def _gitisbare(self):
1447 def _gitisbare(self):
1448 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1448 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1449
1449
1450 def _gitupdatestat(self):
1450 def _gitupdatestat(self):
1451 """This must be run before git diff-index.
1451 """This must be run before git diff-index.
1452 diff-index only looks at changes to file stat;
1452 diff-index only looks at changes to file stat;
1453 this command looks at file contents and updates the stat."""
1453 this command looks at file contents and updates the stat."""
1454 self._gitcommand(['update-index', '-q', '--refresh'])
1454 self._gitcommand(['update-index', '-q', '--refresh'])
1455
1455
def _gitbranchmap(self):
    '''returns 2 things:
    a map from git branch to revision
    a map from revision to branches'''
    branch2rev = {}
    rev2branch = {}

    # each output line is "<sha> <refname>"
    out = self._gitcommand(['for-each-ref', '--format',
                            '%(objectname) %(refname)'])
    for line in out.split('\n'):
        revision, ref = line.split(' ')
        # only local heads and remote-tracking refs are of interest
        if (not ref.startswith('refs/heads/') and
            not ref.startswith('refs/remotes/')):
            continue
        if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
            continue # ignore remote/HEAD redirects
        branch2rev[ref] = revision
        rev2branch.setdefault(revision, []).append(ref)
    return branch2rev, rev2branch
1475
1475
def _gittracking(self, branches):
    'return map of remote branch to local tracking branch'
    # assumes no more than one local tracking branch for each remote
    tracking = {}
    for b in branches:
        if b.startswith('refs/remotes/'):
            continue
        # local branch name is the third '/'-separated component
        bname = b.split('/', 2)[2]
        remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
        if remote:
            ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
            tracking['refs/remotes/%s/%s' %
                     (remote, ref.split('/', 2)[2])] = b
    return tracking
1490
1490
def _abssource(self, source):
    """Turn a subrepo source into an absolute source.

    A source containing '://' or using scp-like 'host:path' syntax is
    already absolute and returned unchanged; otherwise defer to the
    module-level _abssource() helper, which reads self._subsource.
    """
    if '://' not in source:
        # recognize the scp syntax as an absolute source
        colon = source.find(':')
        if colon != -1 and '/' not in source[:colon]:
            return source
    self._subsource = source
    return _abssource(self)
1499
1499
def _fetch(self, source, revision):
    """Make sure *revision* is available locally, cloning or fetching.

    Clones the subrepo from *source* when it is missing entirely, then
    fetches from origin if the revision is still not present.

    Raises error.Abort if the revision cannot be found after fetching.
    """
    if self._gitmissing():
        source = self._abssource(source)
        self.ui.status(_('cloning subrepo %s from %s\n') %
                       (self._relpath, source))
        self._gitnodir(['clone', source, self._abspath])
    if self._githavelocally(revision):
        return
    self.ui.status(_('pulling subrepo %s from %s\n') %
                   (self._relpath, self._gitremote('origin')))
    # try only origin: the originally cloned repo
    self._gitcommand(['fetch'])
    if not self._githavelocally(revision):
        # no trailing '\n': Abort messages are emitted with their own
        # newline, so one here double-spaces the error output
        raise error.Abort(_("revision %s does not exist in subrepo %s") %
                          (revision, self._relpath))
1515
1515
@annotatesubrepoerror
def dirty(self, ignoreupdate=False):
    """Return True if the subrepo has uncommitted or unrecorded changes.

    A missing repo is dirty only if a revision is recorded in state; a
    bare repo is always considered dirty. Unless ignoreupdate is set, a
    checkout differing from the recorded state also counts as dirty.
    """
    if self._gitmissing():
        return self._state[1] != ''
    if self._gitisbare():
        return True
    if not ignoreupdate and self._state[1] != self._gitstate():
        # different version checked out
        return True
    # check for staged changes or modified files; ignore untracked files
    self._gitupdatestat()
    out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
    return code == 1
1529
1529
def basestate(self):
    """Return the current git HEAD revision as the base state."""
    return self._gitstate()
1532
1532
@annotatesubrepoerror
def get(self, state, overwrite=False):
    """Update the working directory to the revision recorded in *state*.

    state is a (source, revision, kind) tuple. An empty revision removes
    the subrepo. Otherwise the revision is fetched and checked out,
    preferring (in order): master, the first local branch, a tracked
    remote branch (fast-forwarding when possible), or a detached HEAD.
    With overwrite=True, local modifications are discarded.
    """
    source, revision, kind = state
    if not revision:
        self.remove()
        return
    self._fetch(source, revision)
    # if the repo was set to be bare, unbare it
    if self._gitisbare():
        self._gitcommand(['config', 'core.bare', 'false'])
        if self._gitstate() == revision:
            self._gitcommand(['reset', '--hard', 'HEAD'])
            return
    elif self._gitstate() == revision:
        if overwrite:
            # first reset the index to unmark new files for commit, because
            # reset --hard will otherwise throw away files added for commit,
            # not just unmark them.
            self._gitcommand(['reset', 'HEAD'])
        self._gitcommand(['reset', '--hard', 'HEAD'])
        return
    branch2rev, rev2branch = self._gitbranchmap()

    def checkout(args):
        # run 'git checkout', honoring overwrite, then strip unsafe files
        cmd = ['checkout']
        if overwrite:
            # first reset the index to unmark new files for commit, because
            # the -f option will otherwise throw away files added for
            # commit, not just unmark them.
            self._gitcommand(['reset', 'HEAD'])
            cmd.append('-f')
        self._gitcommand(cmd + args)
        _sanitize(self.ui, self.wvfs, '.git')

    def rawcheckout():
        # no branch to checkout, check it out with no branch
        self.ui.warn(_('checking out detached HEAD in subrepo %s\n') %
                     self._relpath)
        self.ui.warn(_('check out a git branch if you intend '
                       'to make changes\n'))
        checkout(['-q', revision])

    if revision not in rev2branch:
        rawcheckout()
        return
    branches = rev2branch[revision]
    firstlocalbranch = None
    for b in branches:
        if b == 'refs/heads/master':
            # master trumps all other branches
            checkout(['refs/heads/master'])
            return
        if not firstlocalbranch and not b.startswith('refs/remotes/'):
            firstlocalbranch = b
    if firstlocalbranch:
        checkout([firstlocalbranch])
        return

    tracking = self._gittracking(branch2rev.keys())
    # choose a remote branch already tracked if possible
    remote = branches[0]
    if remote not in tracking:
        for b in branches:
            if b in tracking:
                remote = b
                break

    if remote not in tracking:
        # create a new local tracking branch
        local = remote.split('/', 3)[3]
        checkout(['-b', local, remote])
    elif self._gitisancestor(branch2rev[tracking[remote]], remote):
        # When updating to a tracked remote branch,
        # if the local tracking branch is downstream of it,
        # a normal `git pull` would have performed a "fast-forward merge"
        # which is equivalent to updating the local branch to the remote.
        # Since we are only looking at branching at update, we need to
        # detect this situation and perform this action lazily.
        if tracking[remote] != self._gitcurrentbranch():
            checkout([tracking[remote]])
        self._gitcommand(['merge', '--ff', remote])
        _sanitize(self.ui, self.wvfs, '.git')
    else:
        # a real merge would be required, just checkout the revision
        rawcheckout()
1618
1618
@annotatesubrepoerror
def commit(self, text, user, date):
    """Commit all working-directory changes with message *text*.

    user/date are optional; date is converted to ISO8601 and passed via
    GIT_AUTHOR_DATE. Returns the resulting HEAD revision.

    Raises error.Abort if the subrepo is missing.
    """
    if self._gitmissing():
        raise error.Abort(_("subrepo %s is missing") % self._relpath)
    cmd = ['commit', '-a', '-m', text]
    env = os.environ.copy()
    if user:
        cmd += ['--author', user]
    if date:
        # git's date parser silently ignores when seconds < 1e9
        # convert to ISO8601
        env['GIT_AUTHOR_DATE'] = util.datestr(date,
                                              '%Y-%m-%dT%H:%M:%S %1%2')
    self._gitcommand(cmd, env=env)
    # make sure commit works otherwise HEAD might not exist under certain
    # circumstances
    return self._gitstate()
1636
1636
@annotatesubrepoerror
def merge(self, state):
    """Merge the revision in *state* into the working directory.

    Fast-forwards via get() when the merge base equals the incoming
    revision; otherwise runs 'git merge --no-commit'. When the working
    directory is dirty, the user is prompted before merging.
    """
    source, revision, kind = state
    self._fetch(source, revision)
    base = self._gitcommand(['merge-base', revision, self._state[1]])
    self._gitupdatestat()
    out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])

    def mergefunc():
        if base == revision:
            self.get(state) # fast forward merge
        elif base != self._state[1]:
            self._gitcommand(['merge', '--no-commit', revision])
        _sanitize(self.ui, self.wvfs, '.git')

    if self.dirty():
        if self._gitstate() != revision:
            # dirty means local changes rather than an updated checkout
            dirty = self._gitstate() == self._state[1] or code != 0
            if _updateprompt(self.ui, self, dirty,
                             self._state[1][:7], revision[:7]):
                mergefunc()
    else:
        mergefunc()
1660
1660
@annotatesubrepoerror
def push(self, opts):
    """Push the recorded revision to origin if it is not already there.

    Returns True when nothing needed pushing or the push succeeded,
    False when no usable branch is checked out or the push failed.

    Raises error.Abort if a revision is recorded but the repo is missing.
    """
    force = opts.get('force')

    if not self._state[1]:
        return True
    if self._gitmissing():
        raise error.Abort(_("subrepo %s is missing") % self._relpath)
    # if a branch in origin contains the revision, nothing to do
    branch2rev, rev2branch = self._gitbranchmap()
    if self._state[1] in rev2branch:
        for b in rev2branch[self._state[1]]:
            if b.startswith('refs/remotes/origin/'):
                return True
    for b, revision in branch2rev.iteritems():
        if b.startswith('refs/remotes/origin/'):
            if self._gitisancestor(self._state[1], revision):
                return True
    # otherwise, try to push the currently checked out branch
    cmd = ['push']
    if force:
        cmd.append('--force')

    current = self._gitcurrentbranch()
    if current:
        # determine if the current branch is even useful
        if not self._gitisancestor(self._state[1], current):
            self.ui.warn(_('unrelated git branch checked out '
                           'in subrepo %s\n') % self._relpath)
            return False
        self.ui.status(_('pushing branch %s of subrepo %s\n') %
                       (current.split('/', 2)[2], self._relpath))
        ret = self._gitdir(cmd + ['origin', current])
        return ret[1] == 0
    else:
        self.ui.warn(_('no branch checked out in subrepo %s\n'
                       'cannot push revision %s\n') %
                     (self._relpath, self._state[1]))
        return False
1700
1700
@annotatesubrepoerror
def add(self, ui, match, prefix, explicitonly, **opts):
    """Add files matching *match* to the git index.

    Mirrors hg's add semantics: already-tracked files still print
    'adding', but explicitly-named tracked files are rejected with a
    warning. Returns the list of rejected filenames. A missing repo
    adds nothing.
    """
    if self._gitmissing():
        return []

    (modified, added, removed,
     deleted, unknown, ignored, clean) = self.status(None, unknown=True,
                                                     clean=True)

    tracked = set()
    # dirstates 'amn' warn, 'r' is added again
    for l in (modified, added, deleted, clean):
        tracked.update(l)

    # Unknown files not of interest will be rejected by the matcher
    files = unknown
    files.extend(match.files())

    rejected = []

    files = [f for f in sorted(set(files)) if match(f)]
    for f in files:
        exact = match.exact(f)
        command = ["add"]
        if exact:
            command.append("-f") #should be added, even if ignored
        if ui.verbose or not exact:
            ui.status(_('adding %s\n') % match.rel(f))

        if f in tracked: # hg prints 'adding' even if already tracked
            if exact:
                rejected.append(f)
            continue
        if not opts.get('dry_run'):
            self._gitcommand(command + [f])

    for f in rejected:
        ui.warn(_("%s already tracked!\n") % match.abs(f))

    return rejected
1741
1741
@annotatesubrepoerror
def remove(self):
    """Empty the subrepo working directory, preserving its history.

    Refuses to act on a dirty repo. The .git directory is kept (marked
    bare) because it may contain local-only history; everything else in
    the working directory is deleted.
    """
    if self._gitmissing():
        return
    if self.dirty():
        self.ui.warn(_('not removing repo %s because '
                       'it has changes.\n') % self._relpath)
        return
    # we can't fully delete the repository as it may contain
    # local-only history
    self.ui.note(_('removing subrepo %s\n') % self._relpath)
    self._gitcommand(['config', 'core.bare', 'true'])
    for f, kind in self.wvfs.readdir():
        if f == '.git':
            continue
        if kind == stat.S_IFDIR:
            self.wvfs.rmtree(f)
        else:
            self.wvfs.unlink(f)
1761
1761
def archive(self, archiver, prefix, match=None):
    """Archive the recorded revision's files into *archiver*.

    Streams 'git archive' output through tarfile and feeds each regular
    file (and symlink) to the archiver, optionally filtered by *match*.
    Returns the number of files archived.
    """
    total = 0
    source, revision = self._state
    if not revision:
        return total
    self._fetch(source, revision)

    # Parse git's native archive command.
    # This should be much faster than manually traversing the trees
    # and objects with many subprocess calls.
    tarstream = self._gitcommand(['archive', revision], stream=True)
    tar = tarfile.open(fileobj=tarstream, mode='r|')
    relpath = subrelpath(self)
    self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'))
    for i, info in enumerate(tar):
        if info.isdir():
            continue
        if match and not match(info.name):
            continue
        if info.issym():
            data = info.linkname
        else:
            data = tar.extractfile(info).read()
        archiver.addfile(prefix + self._path + '/' + info.name,
                         info.mode, info.issym(), data)
        total += 1
        self.ui.progress(_('archiving (%s)') % relpath, i + 1,
                         unit=_('files'))
    self.ui.progress(_('archiving (%s)') % relpath, None)
    return total
1792
1792
1793
1793
@annotatesubrepoerror
def cat(self, match, prefix, **opts):
    """Write out the contents of matched files at the recorded revision.

    Uses 'git show rev:file' per file. Returns 0 on success, 1 when the
    matcher uses include/exclude patterns (unsupported) or names no
    files.
    """
    rev = self._state[1]
    if match.anypats():
        return 1 #No support for include/exclude yet

    if not match.files():
        return 1

    for f in match.files():
        output = self._gitcommand(["show", "%s:%s" % (rev, f)])
        fp = cmdutil.makefileobj(self._subparent, opts.get('output'),
                                 self._ctx.node(),
                                 pathname=self.wvfs.reljoin(prefix, f))
        fp.write(output)
        fp.close()
    return 0
1811
1811
1812
1812
@annotatesubrepoerror
def status(self, rev2, **opts):
    """Return an hg-style status between the recorded revision and rev2.

    With rev2=None the comparison is against the working directory
    (diff-index); otherwise between the two revisions (diff-tree).
    opts may request 'unknown', 'ignored' and 'clean' files, gathered
    via 'git status --porcelain' and 'git ls-files'. A missing repo or
    empty recorded revision yields an empty status.
    """
    rev1 = self._state[1]
    if self._gitmissing() or not rev1:
        # if the repo is missing, return no results
        return scmutil.status([], [], [], [], [], [], [])
    modified, added, removed = [], [], []
    self._gitupdatestat()
    if rev2:
        command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
    else:
        command = ['diff-index', '--no-renames', rev1]
    out = self._gitcommand(command)
    for line in out.split('\n'):
        tab = line.find('\t')
        if tab == -1:
            continue
        # status letter immediately precedes the tab in raw diff output
        status, f = line[tab - 1], line[tab + 1:]
        if status == 'M':
            modified.append(f)
        elif status == 'A':
            added.append(f)
        elif status == 'D':
            removed.append(f)

    deleted, unknown, ignored, clean = [], [], [], []

    command = ['status', '--porcelain', '-z']
    if opts.get('unknown'):
        command += ['--untracked-files=all']
    if opts.get('ignored'):
        command += ['--ignored']
    out = self._gitcommand(command)

    changedfiles = set()
    changedfiles.update(modified)
    changedfiles.update(added)
    changedfiles.update(removed)
    for line in out.split('\0'):
        if not line:
            continue
        st = line[0:2]
        #moves and copies show 2 files on one line
        if line.find('\0') >= 0:
            filename1, filename2 = line[3:].split('\0')
        else:
            filename1 = line[3:]
            filename2 = None

        changedfiles.add(filename1)
        if filename2:
            changedfiles.add(filename2)

        if st == '??':
            unknown.append(filename1)
        elif st == '!!':
            ignored.append(filename1)

    if opts.get('clean'):
        out = self._gitcommand(['ls-files'])
        for f in out.split('\n'):
            if not f in changedfiles:
                clean.append(f)

    return scmutil.status(modified, added, removed, deleted,
                          unknown, ignored, clean)
1879
1879
@annotatesubrepoerror
def diff(self, ui, diffopts, node2, match, prefix, **opts):
    """Write a diff between the recorded revision and node2 (or the
    working directory) to *ui*.

    Honors diffopts (context, prefixes, whitespace handling) and the
    'stat' opt; src/dst prefixes are rewritten so paths appear relative
    to the parent repository.
    """
    node1 = self._state[1]
    cmd = ['diff', '--no-renames']
    if opts['stat']:
        cmd.append('--stat')
    else:
        # for Git, this also implies '-p'
        cmd.append('-U%d' % diffopts.context)

    gitprefix = self.wvfs.reljoin(prefix, self._path)

    if diffopts.noprefix:
        cmd.extend(['--src-prefix=%s/' % gitprefix,
                    '--dst-prefix=%s/' % gitprefix])
    else:
        cmd.extend(['--src-prefix=a/%s/' % gitprefix,
                    '--dst-prefix=b/%s/' % gitprefix])

    if diffopts.ignorews:
        cmd.append('--ignore-all-space')
    if diffopts.ignorewsamount:
        cmd.append('--ignore-space-change')
    # --ignore-blank-lines was added in git 1.8.4
    if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
            and diffopts.ignoreblanklines:
        cmd.append('--ignore-blank-lines')

    cmd.append(node1)
    if node2:
        cmd.append(node2)

    output = ""
    if match.always():
        output += self._gitcommand(cmd) + '\n'
    else:
        st = self.status(node2)[:3]
        files = [f for sublist in st for f in sublist]
        for f in files:
            if match(f):
                output += self._gitcommand(cmd + ['--', f]) + '\n'

    if output.strip():
        ui.write(output)
1923
1923
@annotatesubrepoerror
def revert(self, substate, *pats, **opts):
    """Revert the subrepo to the state recorded in *substate*.

    Unless 'no_backup' is set, modified files are first renamed to
    their .orig backup path. Unless 'dry_run' is set, the subrepo is
    then updated with overwrite=True. Returns an empty list.
    """
    self.ui.status(_('reverting subrepo %s\n') % substate[0])
    if not opts.get('no_backup'):
        status = self.status(None)
        names = status.modified
        for name in names:
            bakname = scmutil.origpath(self.ui, self._subparent, name)
            self.ui.note(_('saving current version of %s as %s\n') %
                         (name, bakname))
            self.wvfs.rename(name, bakname)

    if not opts.get('dry_run'):
        self.get(substate, overwrite=True)
    return []
1939
1939
def shortid(self, revid):
    """Return the abbreviated form of *revid* (first 7 hex digits,
    matching git's default short-hash length)."""
    return revid[:7]
1942
1942
# registry mapping subrepo kind (as recorded in .hgsubstate / .hgsub)
# to its implementation class
types = {
    'hg': hgsubrepo,
    'svn': svnsubrepo,
    'git': gitsubrepo,
    }
@@ -1,124 +1,142
1 $ hg init repo
1 $ hg init repo
2 $ cd repo
2 $ cd repo
3 $ hg init subrepo
3 $ hg init subrepo
4 $ echo a > subrepo/a
4 $ echo a > subrepo/a
5 $ hg -R subrepo ci -Am adda
5 $ hg -R subrepo ci -Am adda
6 adding a
6 adding a
7 $ echo 'subrepo = subrepo' > .hgsub
7 $ echo 'subrepo = subrepo' > .hgsub
8 $ hg ci -Am addsubrepo
8 $ hg ci -Am addsubrepo
9 adding .hgsub
9 adding .hgsub
10 $ echo b > subrepo/b
10 $ echo b > subrepo/b
11 $ hg -R subrepo ci -Am addb
11 $ hg -R subrepo ci -Am addb
12 adding b
12 adding b
13 $ hg ci -m updatedsub
13 $ hg ci -m updatedsub
14
14
15 ignore blanklines in .hgsubstate
15 ignore blanklines in .hgsubstate
16
16
17 >>> file('.hgsubstate', 'wb').write('\n\n \t \n \n')
17 >>> file('.hgsubstate', 'wb').write('\n\n \t \n \n')
18 $ hg st --subrepos
18 $ hg st --subrepos
19 M .hgsubstate
19 M .hgsubstate
20 $ hg revert -qC .hgsubstate
20 $ hg revert -qC .hgsubstate
21
21
22 abort more gracefully on .hgsubstate parsing error
22 abort more gracefully on .hgsubstate parsing error
23
23
24 $ cp .hgsubstate .hgsubstate.old
24 $ cp .hgsubstate .hgsubstate.old
25 >>> file('.hgsubstate', 'wb').write('\ninvalid')
25 >>> file('.hgsubstate', 'wb').write('\ninvalid')
26 $ hg st --subrepos --cwd $TESTTMP -R $TESTTMP/repo
26 $ hg st --subrepos --cwd $TESTTMP -R $TESTTMP/repo
27 abort: invalid subrepository revision specifier in 'repo/.hgsubstate' line 2
27 abort: invalid subrepository revision specifier in 'repo/.hgsubstate' line 2
28 [255]
28 [255]
29 $ mv .hgsubstate.old .hgsubstate
29 $ mv .hgsubstate.old .hgsubstate
30
30
31 delete .hgsub and revert it
31 delete .hgsub and revert it
32
32
33 $ rm .hgsub
33 $ rm .hgsub
34 $ hg revert .hgsub
34 $ hg revert .hgsub
35 warning: subrepo spec file '.hgsub' not found
35 warning: subrepo spec file '.hgsub' not found
36 warning: subrepo spec file '.hgsub' not found
36 warning: subrepo spec file '.hgsub' not found
37 warning: subrepo spec file '.hgsub' not found
37 warning: subrepo spec file '.hgsub' not found
38
38
39 delete .hgsubstate and revert it
39 delete .hgsubstate and revert it
40
40
41 $ rm .hgsubstate
41 $ rm .hgsubstate
42 $ hg revert .hgsubstate
42 $ hg revert .hgsubstate
43
43
44 delete .hgsub and update
44 delete .hgsub and update
45
45
46 $ rm .hgsub
46 $ rm .hgsub
47 $ hg up 0 --cwd $TESTTMP -R $TESTTMP/repo
47 $ hg up 0 --cwd $TESTTMP -R $TESTTMP/repo
48 warning: subrepo spec file 'repo/.hgsub' not found
48 warning: subrepo spec file 'repo/.hgsub' not found
49 warning: subrepo spec file 'repo/.hgsub' not found
49 warning: subrepo spec file 'repo/.hgsub' not found
50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
50 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
51 $ hg st
51 $ hg st
52 warning: subrepo spec file '.hgsub' not found
52 warning: subrepo spec file '.hgsub' not found
53 ! .hgsub
53 ! .hgsub
54 $ ls subrepo
54 $ ls subrepo
55 a
55 a
56
56
57 delete .hgsubstate and update
57 delete .hgsubstate and update
58
58
59 $ hg up -C
59 $ hg up -C
60 warning: subrepo spec file '.hgsub' not found
60 warning: subrepo spec file '.hgsub' not found
61 warning: subrepo spec file '.hgsub' not found
61 warning: subrepo spec file '.hgsub' not found
62 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
62 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
63 $ rm .hgsubstate
63 $ rm .hgsubstate
64 $ hg up 0
64 $ hg up 0
65 remote changed .hgsubstate which local deleted
65 remote changed .hgsubstate which local deleted
66 use (c)hanged version or leave (d)eleted? c
66 use (c)hanged version or leave (d)eleted? c
67 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
67 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
68 $ hg st
68 $ hg st
69 $ ls subrepo
69 $ ls subrepo
70 a
70 a
71
71
72 Enable obsolete
72 Enable obsolete
73
73
74 $ cat >> $HGRCPATH << EOF
74 $ cat >> $HGRCPATH << EOF
75 > [ui]
75 > [ui]
76 > logtemplate= {rev}:{node|short} {desc|firstline}
76 > logtemplate= {rev}:{node|short} {desc|firstline}
77 > [phases]
77 > [phases]
78 > publish=False
78 > publish=False
79 > [experimental]
79 > [experimental]
80 > evolution=createmarkers
80 > evolution=createmarkers
81 > EOF
81 > EOF
82
82
83 check that we can update parent repo with missing (amended) subrepo revision
83 check that we can update parent repo with missing (amended) subrepo revision
84
84
85 $ hg up --repository subrepo -r tip
85 $ hg up --repository subrepo -r tip
86 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
86 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
87 $ hg ci -m "updated subrepo to tip"
87 $ hg ci -m "updated subrepo to tip"
88 created new head
88 created new head
89 $ cd subrepo
89 $ cd subrepo
90 $ hg update -r tip
90 $ hg update -r tip
91 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
91 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
92 $ echo foo > a
92 $ echo foo > a
93 $ hg commit --amend -m "addb (amended)"
93 $ hg commit --amend -m "addb (amended)"
94 $ cd ..
94 $ cd ..
95 $ hg update --clean .
95 $ hg update --clean .
96 revision 102a90ea7b4a in subrepo subrepo is hidden
96 revision 102a90ea7b4a in subrepo subrepo is hidden
97 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
97 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
98
98
99 check that --hidden is propagated to the subrepo
99 check that --hidden is propagated to the subrepo
100
100
101 $ hg -R subrepo up tip
101 $ hg -R subrepo up tip
102 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
103 $ hg ci -m 'commit with amended subrepo'
103 $ hg ci -m 'commit with amended subrepo'
104 $ echo bar > subrepo/a
104 $ echo bar > subrepo/a
105 $ hg -R subrepo ci --amend -m "amend a (again)"
105 $ hg -R subrepo ci --amend -m "amend a (again)"
106 $ hg --hidden cat subrepo/a
106 $ hg --hidden cat subrepo/a
107 foo
107 foo
108
108
109 verify will warn if locked-in subrepo revisions are hidden or missing
109 verify will warn if locked-in subrepo revisions are hidden or missing
110
110
111 $ hg ci -m "amended subrepo (again)"
111 $ hg ci -m "amended subrepo (again)"
112 $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip'
112 $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip'
113 $ hg verify
113 $ hg verify
114 checking changesets
114 checking changesets
115 checking manifests
115 checking manifests
116 crosschecking files in changesets and manifests
116 crosschecking files in changesets and manifests
117 checking files
117 checking files
118 2 files, 5 changesets, 5 total revisions
118 2 files, 5 changesets, 5 total revisions
119 checking subrepo links
119 checking subrepo links
120 subrepo 'subrepo' is hidden in revision a66de08943b6
120 subrepo 'subrepo' is hidden in revision a66de08943b6
121 subrepo 'subrepo' is hidden in revision 674d05939c1e
121 subrepo 'subrepo' is hidden in revision 674d05939c1e
122 subrepo 'subrepo' not found in revision a7d05d9055a4
122 subrepo 'subrepo' not found in revision a7d05d9055a4
123
123
124 verifying shouldn't init a new subrepo if the reference doesn't exist
125
126 $ mv subrepo b
127 $ hg verify
128 checking changesets
129 checking manifests
130 crosschecking files in changesets and manifests
131 checking files
132 2 files, 5 changesets, 5 total revisions
133 checking subrepo links
134 0: repository $TESTTMP/repo/subrepo not found (glob)
135 1: repository $TESTTMP/repo/subrepo not found (glob)
136 3: repository $TESTTMP/repo/subrepo not found (glob)
137 4: repository $TESTTMP/repo/subrepo not found (glob)
138 $ ls
139 b
140 $ mv b subrepo
141
124 $ cd ..
142 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now