##// END OF EJS Templates
subrepo: adjust subrepo prefix before calling subrepo.addremove() (API)...
Martin von Zweigbergk -
r41778:5ee3c49f default
parent child Browse files
Show More
@@ -1,1841 +1,1842 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import errno
10 import errno
11 import glob
11 import glob
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import subprocess
15 import subprocess
16 import weakref
16 import weakref
17
17
18 from .i18n import _
18 from .i18n import _
19 from .node import (
19 from .node import (
20 bin,
20 bin,
21 hex,
21 hex,
22 nullid,
22 nullid,
23 nullrev,
23 nullrev,
24 short,
24 short,
25 wdirid,
25 wdirid,
26 wdirrev,
26 wdirrev,
27 )
27 )
28
28
29 from . import (
29 from . import (
30 encoding,
30 encoding,
31 error,
31 error,
32 match as matchmod,
32 match as matchmod,
33 obsolete,
33 obsolete,
34 obsutil,
34 obsutil,
35 pathutil,
35 pathutil,
36 phases,
36 phases,
37 policy,
37 policy,
38 pycompat,
38 pycompat,
39 revsetlang,
39 revsetlang,
40 similar,
40 similar,
41 smartset,
41 smartset,
42 url,
42 url,
43 util,
43 util,
44 vfs,
44 vfs,
45 )
45 )
46
46
47 from .utils import (
47 from .utils import (
48 procutil,
48 procutil,
49 stringutil,
49 stringutil,
50 )
50 )
51
51
52 if pycompat.iswindows:
52 if pycompat.iswindows:
53 from . import scmwindows as scmplatform
53 from . import scmwindows as scmplatform
54 else:
54 else:
55 from . import scmposix as scmplatform
55 from . import scmposix as scmplatform
56
56
57 parsers = policy.importmod(r'parsers')
57 parsers = policy.importmod(r'parsers')
58
58
59 termsize = scmplatform.termsize
59 termsize = scmplatform.termsize
60
60
class status(tuple):
    '''Named tuple with a list of files per status. The 'deleted', 'unknown'
    and 'ignored' properties are only relevant to the working copy.
    '''

    # No per-instance __dict__; each instance is a plain 7-tuple.
    __slots__ = ()

    def __new__(cls, modified, added, removed, deleted, unknown, ignored,
                clean):
        # The positional order here fixes the tuple indices read by the
        # properties below.
        return tuple.__new__(cls, (modified, added, removed, deleted, unknown,
                                   ignored, clean))

    @property
    def modified(self):
        '''files that have been modified'''
        return self[0]

    @property
    def added(self):
        '''files that have been added'''
        return self[1]

    @property
    def removed(self):
        '''files that have been removed'''
        return self[2]

    @property
    def deleted(self):
        '''files that are in the dirstate, but have been deleted from the
        working copy (aka "missing")
        '''
        return self[3]

    @property
    def unknown(self):
        '''files not in the dirstate that are not ignored'''
        return self[4]

    @property
    def ignored(self):
        '''files not in the dirstate that are ignored (by _dirignore())'''
        return self[5]

    @property
    def clean(self):
        '''files that have not been modified'''
        return self[6]

    def __repr__(self, *args, **kwargs):
        # Render each field with stringutil.pprint so the lists are shown
        # in a stable, readable form.
        return ((r'<status modified=%s, added=%s, removed=%s, deleted=%s, '
                 r'unknown=%s, ignored=%s, clean=%s>') %
                tuple(pycompat.sysstr(stringutil.pprint(v)) for v in self))
114
114
def itersubrepos(ctx1, ctx2):
    """find subrepos in ctx1 or ctx2"""
    # Build a subpath -> context mapping, preferring ctx1 for paths present
    # in both. Paths from ctx2 matter when the .hgsub file has been modified
    # (in ctx2) but the change is not yet committed (in ctx1).
    subpaths = dict.fromkeys(ctx2.substate, ctx2)
    subpaths.update(dict.fromkeys(ctx1.substate, ctx1))

    # Subpaths that exist only in ctx2 get special treatment below.
    missing = set(ctx2.substate) - set(ctx1.substate)
    for subpath in missing:
        del subpaths[subpath]

    for subpath, ctx in sorted(subpaths.iteritems()):
        yield subpath, ctx.sub(subpath)

    # Yield an empty subrepo based on ctx1 for anything only in ctx2. That
    # way, status and diff will have an accurate result when it does
    # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared
    # against itself.
    for subpath in missing:
        yield subpath, ctx2.nullsub(subpath, ctx1)
139
139
def nochangesfound(ui, repo, excluded=None):
    '''Report no changes for push/pull, excluded is None or a list of
    nodes excluded from the push/pull.
    '''
    def _livesecret(n):
        # An excluded node explains the "no changes" outcome only if it is
        # a secret changeset that has not become extinct.
        ctx = repo[n]
        return ctx.phase() >= phases.secret and not ctx.extinct()

    secretlist = [n for n in excluded if _livesecret(n)] if excluded else []

    if not secretlist:
        ui.status(_("no changes found\n"))
    else:
        ui.status(_("no changes found (ignored %d secret changesets)\n")
                  % len(secretlist))
156
156
def callcatch(ui, func):
    """call func() with global exception handling

    return func() if no exception happens. otherwise do some error handling
    and return an exit code accordingly. does not handle all exceptions.
    """
    try:
        try:
            return func()
        except: # re-raises
            # Log the traceback (shown with --traceback) before the outer
            # handlers translate the exception into a message + exit code.
            ui.traceback()
            raise
    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.LockHeld as inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %r') % (
                pycompat.bytestr(inst.locker))
        else:
            reason = _('lock held by %r') % inst.locker
        ui.error(_("abort: %s: %s\n") % (
            inst.desc or stringutil.forcebytestr(inst.filename), reason))
        if not inst.locker:
            ui.error(_("(lock might be very busy)\n"))
    except error.LockUnavailable as inst:
        ui.error(_("abort: could not lock %s: %s\n") %
                 (inst.desc or stringutil.forcebytestr(inst.filename),
                  encoding.strtolocal(inst.strerror)))
    except error.OutOfBandError as inst:
        if inst.args:
            msg = _("abort: remote error:\n")
        else:
            msg = _("abort: remote error\n")
        ui.error(msg)
        if inst.args:
            ui.error(''.join(inst.args))
        if inst.hint:
            ui.error('(%s)\n' % inst.hint)
    except error.RepoError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.ResponseError as inst:
        ui.error(_("abort: %s") % inst.args[0])
        msg = inst.args[1]
        # Normalize the payload before deciding how to display it: unicode
        # is converted to bytes first, then non-bytes, empty and non-empty
        # payloads get different renderings.
        if isinstance(msg, type(u'')):
            msg = pycompat.sysbytes(msg)
        if not isinstance(msg, bytes):
            ui.error(" %r\n" % (msg,))
        elif not msg:
            ui.error(_(" empty string\n"))
        else:
            ui.error("\n%r\n" % pycompat.bytestr(stringutil.ellipsis(msg)))
    except error.CensoredNodeError as inst:
        ui.error(_("abort: file censored %s!\n") % inst)
    except error.StorageError as inst:
        ui.error(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except error.InterventionRequired as inst:
        # Not an abort: the user must resolve something; exit code 1
        # instead of the generic -1.
        ui.error("%s\n" % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
        return 1
    except error.WdirUnsupported:
        ui.error(_("abort: working directory revision cannot be specified\n"))
    except error.Abort as inst:
        ui.error(_("abort: %s\n") % inst)
        if inst.hint:
            ui.error(_("(%s)\n") % inst.hint)
    except ImportError as inst:
        ui.error(_("abort: %s!\n") % stringutil.forcebytestr(inst))
        # The last word of the ImportError message is the module name;
        # suggest a likely cause for known C extension modules.
        m = stringutil.forcebytestr(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.error(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.error(_("(is your Python install correct?)\n"))
    except (IOError, OSError) as inst:
        if util.safehasattr(inst, "code"): # HTTPError
            ui.error(_("abort: %s\n") % stringutil.forcebytestr(inst))
        elif util.safehasattr(inst, "reason"): # URLError or SSLError
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, pycompat.unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = encoding.unitolocal(reason)
            ui.error(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            # Broken pipe (e.g. output consumer went away): exit quietly.
            pass
        elif getattr(inst, "strerror", None): # common IOError or OSError
            if getattr(inst, "filename", None) is not None:
                ui.error(_("abort: %s: '%s'\n") % (
                    encoding.strtolocal(inst.strerror),
                    stringutil.forcebytestr(inst.filename)))
            else:
                ui.error(_("abort: %s\n") % encoding.strtolocal(inst.strerror))
        else: # suspicious IOError
            raise
    except MemoryError:
        ui.error(_("abort: out of memory\n"))
    except SystemExit as inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and pass exit code to caller.
        return inst.code

    return -1
267
267
def checknewlabel(repo, lbl, kind):
    """Abort if lbl is not usable as a new label name.

    Do not use the "kind" parameter in ui output: it makes strings
    difficult to translate.
    """
    reserved = ('tip', '.', 'null')
    if lbl in reserved:
        raise error.Abort(_("the name '%s' is reserved") % lbl)
    for forbidden in (':', '\0', '\n', '\r'):
        if forbidden in lbl:
            raise error.Abort(
                _("%r cannot be used in a name") % pycompat.bytestr(forbidden))
    try:
        int(lbl)
    except ValueError:
        pass
    else:
        # Purely numeric names would be ambiguous with revision numbers.
        raise error.Abort(_("cannot use an integer as a name"))
    if lbl != lbl.strip():
        raise error.Abort(_("leading or trailing whitespace in name %r") % lbl)
284
284
def checkfilename(f):
    '''Check that the filename f is an acceptable filename for a tracked file'''
    for banned in ('\r', '\n'):
        if banned in f:
            raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r")
                              % pycompat.bytestr(f))
290
290
def checkportable(ui, f):
    '''Check if filename f is portable and warn or abort depending on config'''
    checkfilename(f)
    abort, warn = checkportabilityalert(ui)
    if not (abort or warn):
        return
    msg = util.checkwinfilename(f)
    if not msg:
        return
    msg = "%s: %s" % (msg, procutil.shellquote(f))
    if abort:
        raise error.Abort(msg)
    ui.warn(_("warning: %s\n") % msg)
302
302
def checkportabilityalert(ui):
    '''check if the user's config requests nothing, a warning, or abort for
    non-portable filenames'''
    value = ui.config('ui', 'portablefilenames')
    lowered = value.lower()
    asbool = stringutil.parsebool(value)
    # On Windows non-portable names always abort; elsewhere 'abort' opts in.
    abort = pycompat.iswindows or lowered == 'abort'
    warn = asbool or lowered == 'warn'
    recognized = warn or abort or lowered == 'ignore'
    if asbool is None and not recognized:
        raise error.ConfigError(
            _("ui.portablefilenames value is invalid ('%s')") % value)
    return abort, warn
315
315
class casecollisionauditor(object):
    '''Detect new filenames that collide case-insensitively with known ones.

    Call the instance with each filename being added; it warns (or aborts,
    if the "abort" constructor flag is set) when the lowercased name is
    already known while the exact name is not in the dirstate.
    '''
    def __init__(self, ui, abort, dirstate):
        self._ui = ui
        self._abort = abort
        # Lowercase every dirstate filename in a single encoding.lower()
        # call by NUL-joining and re-splitting (presumably cheaper than one
        # call per name — NUL cannot appear inside a tracked filename).
        allfiles = '\0'.join(dirstate._map)
        self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
        self._dirstate = dirstate
        # The purpose of _newfiles is so that we don't complain about
        # case collisions if someone were to call this object with the
        # same filename twice.
        self._newfiles = set()

    def __call__(self, f):
        if f in self._newfiles:
            return
        fl = encoding.lower(f)
        # Collision: some other name lowercases to the same string, and f
        # itself is not already tracked under this exact spelling.
        if fl in self._loweredfiles and f not in self._dirstate:
            msg = _('possible case-folding collision for %s') % f
            if self._abort:
                raise error.Abort(msg)
            self._ui.warn(_("warning: %s\n") % msg)
        # Remember this name so later additions are checked against it too.
        self._loweredfiles.add(fl)
        self._newfiles.add(f)
339
339
def filteredhash(repo, maxrev):
    """build hash of filtered revisions in the current repoview.

    Multiple caches perform up-to-date validation by checking that the
    tiprev and tipnode stored in the cache file match the current repository.
    However, this is not sufficient for validating repoviews because the set
    of revisions in the view may change without the repository tiprev and
    tipnode changing.

    This function hashes all the revs filtered from the view and returns
    that SHA-1 digest.
    """
    cl = repo.changelog
    if not cl.filteredrevs:
        return None
    # Only revisions at or below maxrev participate; sort for a stable hash.
    revs = sorted(r for r in cl.filteredrevs if r <= maxrev)
    if not revs:
        return None
    hasher = hashlib.sha1()
    for rev in revs:
        hasher.update('%d;' % rev)
    return hasher.digest()
363
363
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, always recursively.
    The recurse flag will only control recursion into repo working dirs'''
    def errhandler(err):
        # Only a failure on the starting path itself is fatal; errors on
        # subdirectories encountered during the walk are silently skipped.
        if err.filename == path:
            raise err
    # Following symlinks needs os.path.samestat to recognize directories
    # already visited (and so avoid symlink cycles); without it, symlink
    # following is disabled.
    samestat = getattr(os.path, 'samestat', None)
    if followsym and samestat is not None:
        def adddir(dirlst, dirname):
            # Record dirname's stat in dirlst if not seen before; return
            # True when it was new (i.e. safe to descend into).
            dirstat = os.stat(dirname)
            match = any(samestat(dirstat, lstdirstat) for lstdirstat in dirlst)
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        # Seed the seen list with the starting directory.
        seen_dirs = []
        adddir(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        # Sort in-place so repositories are yielded in deterministic order.
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            # os.walk does not follow symlinks here, so recurse into
            # symlinked directories manually, reusing seen_dirs to break
            # cycles; plain directories continue in the current walk.
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if adddir(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
407
407
def binnode(ctx):
    """Return binary node id for a given basectx"""
    # The working directory context has no real node; use the magic wdir id.
    node = ctx.node()
    return wdirid if node is None else node
414
414
def intrev(ctx):
    """Return integer for a given basectx that can be used in comparison or
    arithmetic operation"""
    # The working directory has rev None; map it to the magic wdir revnum.
    rev = ctx.rev()
    return wdirrev if rev is None else rev
422
422
def formatchangeid(ctx):
    """Format changectx as '{rev}:{node|formatnode}', which is the default
    template provided by logcmdutil.changesettemplater"""
    return formatrevnode(ctx.repo().ui, intrev(ctx), binnode(ctx))
428
428
def formatrevnode(ui, rev, node):
    """Format given revision and node depending on the current verbosity"""
    # Debug mode shows the full hash; otherwise the short form suffices.
    hexfunc = hex if ui.debugflag else short
    return '%d:%s' % (rev, hexfunc(node))
436
436
def resolvehexnodeidprefix(repo, prefix):
    """Resolve a hex nodeid prefix to a full binary node, or None.

    An 'x' prefix is stripped first when experimental.revisions.prefixhexnode
    is enabled. On an ambiguous prefix, the revset from
    experimental.revisions.disambiguatewithin is tried as a tiebreaker; if
    that still does not yield a single match, the
    AmbiguousPrefixLookupError is re-raised.
    """
    if (prefix.startswith('x') and
        repo.ui.configbool('experimental', 'revisions.prefixhexnode')):
        prefix = prefix[1:]
    try:
        # Uses unfiltered repo because it's faster when prefix is ambiguous.
        # This matches the shortesthexnodeidprefix() function below.
        node = repo.unfiltered().changelog._partialmatch(prefix)
    except error.AmbiguousPrefixLookupError:
        revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
        if revset:
            # Clear config to avoid infinite recursion
            configoverrides = {('experimental',
                                'revisions.disambiguatewithin'): None}
            with repo.ui.configoverride(configoverrides):
                revs = repo.anyrevs([revset], user=True)
            # Keep only candidates in the revset whose full hash still
            # starts with the requested prefix.
            matches = []
            for rev in revs:
                node = repo.changelog.node(rev)
                if hex(node).startswith(prefix):
                    matches.append(node)
            if len(matches) == 1:
                return matches[0]
        raise
    if node is None:
        return
    repo.changelog.rev(node) # make sure node isn't filtered
    return node
465
465
def mayberevnum(repo, prefix):
    """Checks if the given prefix may be mistaken for a revision number"""
    try:
        value = int(prefix)
    except ValueError:
        # Not numeric at all, so it cannot clash with a revnum.
        return False
    # A revnum is never printed with a leading zero (except '0' itself),
    # and numbers beyond the tip rev do not name any revision. Note that
    # '0' itself *is* a valid revnum and still needs disambiguation.
    hasleadingzero = prefix != b'0' and prefix[0:1] == b'0'
    return not hasleadingzero and value < len(repo)
479
479
def shortesthexnodeidprefix(repo, node, minlength=1, cache=None):
    """Find the shortest unambiguous prefix that matches hexnode.

    If "cache" is not None, it must be a dictionary that can be used for
    caching between calls to this method.

    The returned prefix may be prefixed with 'x' (when
    experimental.revisions.prefixhexnode is set) to mark it as a hex node
    id rather than a revision number.  Raises error.RepoLookupError if the
    node cannot be found in the changelog.
    """
    # _partialmatch() of filtered changelog could take O(len(repo)) time,
    # which would be unacceptably slow. so we look for hash collision in
    # unfiltered space, which means some hashes may be slightly longer.

    minlength=max(minlength, 1)

    def disambiguate(prefix):
        """Disambiguate against revnums."""
        if repo.ui.configbool('experimental', 'revisions.prefixhexnode'):
            # mark the prefix as a hex node id by prepending 'x' instead of
            # lengthening it past any colliding revnum
            if mayberevnum(repo, prefix):
                return 'x' + prefix
            else:
                return prefix

        # otherwise, extend the prefix until it can no longer be mistaken
        # for a revision number
        hexnode = hex(node)
        for length in range(len(prefix), len(hexnode) + 1):
            prefix = hexnode[:length]
            if not mayberevnum(repo, prefix):
                return prefix

    cl = repo.unfiltered().changelog
    revset = repo.ui.config('experimental', 'revisions.disambiguatewithin')
    if revset:
        revs = None
        if cache is not None:
            revs = cache.get('disambiguationrevset')
        if revs is None:
            revs = repo.anyrevs([revset], user=True)
            if cache is not None:
                cache['disambiguationrevset'] = revs
        if cl.rev(node) in revs:
            hexnode = hex(node)
            nodetree = None
            if cache is not None:
                nodetree = cache.get('disambiguationnodetree')
            if not nodetree:
                try:
                    # prefer the C-accelerated nodetree when available
                    nodetree = parsers.nodetree(cl.index, len(revs))
                except AttributeError:
                    # no native nodetree
                    pass
                else:
                    for r in revs:
                        nodetree.insert(r)
                    if cache is not None:
                        cache['disambiguationnodetree'] = nodetree
            if nodetree is not None:
                length = max(nodetree.shortest(node), minlength)
                prefix = hexnode[:length]
                return disambiguate(prefix)
            # pure-Python fallback: linear scan of the disambiguation revset
            # for each candidate length (O(len(revs)) per length)
            for length in range(minlength, len(hexnode) + 1):
                matches = []
                prefix = hexnode[:length]
                for rev in revs:
                    otherhexnode = repo[rev].hex()
                    if prefix == otherhexnode[:length]:
                        matches.append(otherhexnode)
                if len(matches) == 1:
                    return disambiguate(prefix)

    try:
        return disambiguate(cl.shortest(node, minlength))
    except error.LookupError:
        raise error.RepoLookupError()
550
550
def isrevsymbol(repo, symbol):
    """Checks if a symbol exists in the repo.

    See revsymbol() for details. Raises error.AmbiguousPrefixLookupError if the
    symbol is an ambiguous nodeid prefix.
    """
    try:
        revsymbol(repo, symbol)
    except error.RepoLookupError:
        return False
    return True
562
562
def revsymbol(repo, symbol):
    """Returns a context given a single revision symbol (as string).

    This is similar to revsingle(), but accepts only a single revision symbol,
    i.e. things like ".", "tip", "1234", "deadbeef", "my-bookmark" work, but
    not "max(public())".

    Raises error.RepoLookupError for unknown symbols and
    error.ProgrammingError when a non-bytes symbol is passed.
    """
    if not isinstance(symbol, bytes):
        msg = ("symbol (%s of type %s) was not a string, did you mean "
               "repo[symbol]?" % (symbol, type(symbol)))
        raise error.ProgrammingError(msg)
    try:
        # fast path for the common special symbols
        if symbol in ('.', 'tip', 'null'):
            return repo[symbol]

        # next, try to interpret the symbol as a revision number; the
        # round-trip through '%d' rejects forms like '010' or '+5'
        try:
            r = int(symbol)
            if '%d' % r != symbol:
                raise ValueError
            l = len(repo.changelog)
            if r < 0:
                # negative revnums count back from the tip
                r += l
            if r < 0 or r >= l and r != wdirrev:
                raise ValueError
            return repo[r]
        except error.FilteredIndexError:
            # filtered revision: let the outer handler build a nice error
            raise
        except (ValueError, OverflowError, IndexError):
            pass

        # a full 40-char hex nodeid
        if len(symbol) == 40:
            try:
                node = bin(symbol)
                rev = repo.changelog.rev(node)
                return repo[rev]
            except error.FilteredLookupError:
                raise
            except (TypeError, LookupError):
                pass

        # look up bookmarks through the name interface
        try:
            node = repo.names.singlenode(repo, symbol)
            rev = repo.changelog.rev(node)
            return repo[rev]
        except KeyError:
            pass

        # finally, try the symbol as a (short) hex nodeid prefix
        node = resolvehexnodeidprefix(repo, symbol)
        if node is not None:
            rev = repo.changelog.rev(node)
            return repo[rev]

        raise error.RepoLookupError(_("unknown revision '%s'") % symbol)

    except error.WdirUnsupported:
        # the symbol resolved to the working directory revision
        return repo[None]
    except (error.FilteredIndexError, error.FilteredLookupError,
            error.FilteredRepoLookupError):
        raise _filterederror(repo, symbol)
623
623
def _filterederror(repo, changeid):
    """build an exception to be raised about a filtered changeid

    This is extracted in a function to help extensions (eg: evolve) to
    experiment with various message variants."""
    if not repo.filtername.startswith('visible'):
        # generic message for non-visibility filters (e.g. 'served')
        msg = _("filtered revision '%s' (not in '%s' subset)")
        msg %= (changeid, repo.filtername)
        return error.FilteredRepoLookupError(msg)

    # The changeset is hidden by a visibility filter; resolve it in the
    # unfiltered repo to check whether it is obsolete, so we can explain
    # *why* it is not visible.
    ctx = revsymbol(repo.unfiltered(), changeid)
    if ctx.obsolete():
        msg = obsutil._getfilteredreason(repo, changeid, ctx)
    else:
        msg = _("hidden revision '%s'") % changeid

    hint = _('use --hidden to access hidden revisions')
    return error.FilteredRepoLookupError(msg, hint=hint)
648
648
def revsingle(repo, revspec, default='.', localalias=None):
    """Return a context for the last revision matched by revspec.

    An empty revspec (other than the integer 0) selects the "default"
    symbol instead.  Aborts if the revset resolves to no revisions.
    """
    if not revspec and revspec != 0:
        return repo[default]

    matched = revrange(repo, [revspec], localalias=localalias)
    if not matched:
        raise error.Abort(_('empty revision set'))
    return repo[matched.last()]
657
657
def _pairspec(revspec):
    """Report whether revspec's top-level operator is a range expression."""
    parsed = revsetlang.parse(revspec)
    return parsed and parsed[0] in ('range', 'rangepre', 'rangepost',
                                    'rangeall')
661
661
def revpair(repo, revs):
    """Return a pair of contexts for the ends of the given revision specs.

    With no specs, returns (working copy parent, working copy context).
    If the specs resolve to a single revision, the second element is the
    working copy context unless the spec was an explicit range expression.
    Aborts on an empty result.
    """
    if not revs:
        return repo['.'], repo[None]

    l = revrange(repo, revs)

    if not l:
        raise error.Abort(_('empty revision range'))

    first = l.first()
    second = l.last()

    # several specs collapsing onto one revision is fine, but if any single
    # spec resolved to nothing, the range is considered half-empty
    if (first == second and len(revs) >= 2
        and not all(revrange(repo, [r]) for r in revs)):
        raise error.Abort(_('empty revision on one side of range'))

    # if top-level is range expression, the result must always be a pair
    if first == second and len(revs) == 1 and not _pairspec(revs[0]):
        return repo[first], repo[None]

    return repo[first], repo[second]
683
683
def revrange(repo, specs, localalias=None):
    """Execute 1 to many revsets and return the union.

    This is the preferred mechanism for executing revsets using user-specified
    config options, such as revset aliases.

    The revsets specified by ``specs`` will be executed via a chained ``OR``
    expression. If ``specs`` is empty, an empty result is returned.

    ``specs`` can contain integers, in which case they are assumed to be
    revision numbers.

    It is assumed the revsets are already formatted. If you have arguments
    that need to be expanded in the revset, call ``revsetlang.formatspec()``
    and pass the result as an element of ``specs``.

    Specifying a single revset is allowed.

    Returns a ``revset.abstractsmartset`` which is a list-like interface over
    integer revisions.
    """
    # integers are turned into trivial revset expressions; everything else
    # is assumed to already be a formatted revset string
    allspecs = [revsetlang.formatspec('%d', spec)
                if isinstance(spec, int) else spec
                for spec in specs]
    return repo.anyrevs(allspecs, user=True, localalias=localalias)
711
711
def meaningfulparents(repo, ctx):
    """Return list of meaningful (or all if debug) parentrevs for rev.

    For merges (two non-nullrev revisions) both parents are meaningful.
    Otherwise the first parent revision is considered meaningful if it
    is not the preceding revision.
    """
    parents = ctx.parents()
    if len(parents) > 1:
        # a real merge: both parents are always meaningful
        return parents
    if repo.ui.debugflag:
        # debug mode shows both parents, padding with the null revision
        return [parents[0], repo[nullrev]]
    onlyparent = parents[0]
    meaningful = onlyparent.rev() < intrev(ctx) - 1
    return parents if meaningful else []
727
727
def getuipathfn(repo, legacyrelativevalue=False, forcerelativevalue=None):
    """Return a function that produces paths for presenting to the user.

    The returned function takes a repo-relative path and produces a path
    that can be presented in the UI.

    Depending on the value of ui.relative-paths, either a repo-relative or
    cwd-relative path will be produced.

    legacyrelativevalue is the value to use if ui.relative-paths=legacy

    If forcerelativevalue is not None, then that value will be used regardless
    of what ui.relative-paths is set to.

    Raises error.ConfigError when ui.relative-paths is neither a boolean
    nor 'legacy'.
    """
    if forcerelativevalue is not None:
        relative = forcerelativevalue
    else:
        config = repo.ui.config('ui', 'relative-paths')
        if config == 'legacy':
            relative = legacyrelativevalue
        else:
            relative = stringutil.parsebool(config)
            if relative is None:
                raise error.ConfigError(
                    _("ui.relative-paths is not a boolean ('%s')") % config)

    if relative:
        # bind cwd and the bound method once, so the returned function
        # avoids per-call attribute lookups
        cwd = repo.getcwd()
        pathto = repo.pathto
        return lambda f: pathto(f, cwd)
    else:
        # repo-relative paths are passed through unchanged
        return lambda f: f
760
760
def expandpats(pats):
    '''Expand bare globs when running on windows.
    On posix we assume it has already been done by sh.'''
    if not util.expandglobs:
        return list(pats)
    ret = []
    for kindpat in pats:
        kind, pat = matchmod._patsplit(kindpat, None)
        if kind is None:
            # no explicit pattern kind prefix (like 'glob:'), so the
            # pattern is subject to shell-style expansion
            try:
                globbed = glob.glob(pat)
            except re.error:
                # glob can raise re.error for some malformed patterns;
                # fall back to the literal pattern
                globbed = [pat]
            if globbed:
                ret.extend(globbed)
                continue
        # kinded patterns and globs that matched nothing pass through
        ret.append(kindpat)
    return ret
779
779
def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath',
                 badfn=None):
    '''Return a matcher and the patterns that were used.
    The matcher will warn about bad matches, unless an alternate badfn callback
    is provided.'''
    if pats == ("",):
        pats = []
    if opts is None:
        opts = {}
    if not globbed and default == 'relpath':
        # expand bare globs (a no-op on posix, see expandpats())
        pats = expandpats(pats or [])

    # NOTE: closes over 'm', which is only bound below; by the time the
    # matcher invokes the callback, 'm' exists (late binding)
    def bad(f, msg):
        ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg))

    if badfn is None:
        badfn = bad

    m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
                  default, listsubrepos=opts.get('subrepos'), badfn=badfn)

    if m.always():
        # a match-everything matcher means the patterns were vacuous
        pats = []
    return m, pats
804
804
def match(ctx, pats=(), opts=None, globbed=False, default='relpath',
          badfn=None):
    '''Return a matcher that will warn about bad matches.'''
    m, _pats = matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)
    return m
809
809
def matchall(repo):
    '''Return a matcher that will efficiently match everything.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.always(root, cwd)
813
813
def matchfiles(repo, files, badfn=None):
    '''Return a matcher that will efficiently match exactly these files.'''
    root = repo.root
    cwd = repo.getcwd()
    return matchmod.exact(root, cwd, files, badfn=badfn)
817
817
def parsefollowlinespattern(repo, rev, pat, msg):
    """Return a file name from `pat` pattern suitable for usage in followlines
    logic.
    """
    if not matchmod.patkind(pat):
        # plain path: just canonicalize it
        return pathutil.canonpath(repo.root, repo.getcwd(), pat)

    # kinded pattern: it must select exactly one file in the revision
    ctx = repo[rev]
    matcher = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=ctx)
    matched = [f for f in ctx if matcher(f)]
    if len(matched) != 1:
        raise error.ParseError(msg)
    return matched[0]
831
831
def getorigvfs(ui, repo):
    """return a vfs suitable to save 'orig' file

    return None if no special directory is configured"""
    configuredpath = ui.config('ui', 'origbackuppath')
    if not configuredpath:
        return None
    return vfs.vfs(repo.wvfs.join(configuredpath))
840
840
def backuppath(ui, repo, filepath):
    '''customize where working copy backup files (.orig files) are created

    Fetch user defined path from config file: [ui] origbackuppath = <path>
    Fall back to default (filepath with .orig suffix) if not specified

    filepath is repo-relative

    Returns an absolute path
    '''
    origvfs = getorigvfs(ui, repo)
    if origvfs is None:
        # no dedicated backup directory configured: back up next to the file
        return repo.wjoin(filepath + ".orig")

    origbackupdir = origvfs.dirname(filepath)
    if not origvfs.isdir(origbackupdir) or origvfs.islink(origbackupdir):
        ui.note(_('creating directory: %s\n') % origvfs.join(origbackupdir))

        # Remove any files that conflict with the backup file's path
        for f in reversed(list(util.finddirs(filepath))):
            if origvfs.isfileorlink(f):
                ui.note(_('removing conflicting file: %s\n')
                        % origvfs.join(f))
                origvfs.unlink(f)
                # only the deepest conflicting ancestor needs removal
                break

        origvfs.makedirs(origbackupdir)

    # a directory (but not a symlink) at the target path would shadow the
    # backup file, so remove it
    if origvfs.isdir(filepath) and not origvfs.islink(filepath):
        ui.note(_('removing conflicting directory: %s\n')
                % origvfs.join(filepath))
        origvfs.rmtree(filepath, forcibly=True)

    return origvfs.join(filepath)
875
875
876 class _containsnode(object):
876 class _containsnode(object):
877 """proxy __contains__(node) to container.__contains__ which accepts revs"""
877 """proxy __contains__(node) to container.__contains__ which accepts revs"""
878
878
879 def __init__(self, repo, revcontainer):
879 def __init__(self, repo, revcontainer):
880 self._torev = repo.changelog.rev
880 self._torev = repo.changelog.rev
881 self._revcontains = revcontainer.__contains__
881 self._revcontains = revcontainer.__contains__
882
882
883 def __contains__(self, node):
883 def __contains__(self, node):
884 return self._revcontains(self._torev(node))
884 return self._revcontains(self._torev(node))
885
885
886 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
886 def cleanupnodes(repo, replacements, operation, moves=None, metadata=None,
887 fixphase=False, targetphase=None, backup=True):
887 fixphase=False, targetphase=None, backup=True):
888 """do common cleanups when old nodes are replaced by new nodes
888 """do common cleanups when old nodes are replaced by new nodes
889
889
890 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
890 That includes writing obsmarkers or stripping nodes, and moving bookmarks.
891 (we might also want to move working directory parent in the future)
891 (we might also want to move working directory parent in the future)
892
892
893 By default, bookmark moves are calculated automatically from 'replacements',
893 By default, bookmark moves are calculated automatically from 'replacements',
894 but 'moves' can be used to override that. Also, 'moves' may include
894 but 'moves' can be used to override that. Also, 'moves' may include
895 additional bookmark moves that should not have associated obsmarkers.
895 additional bookmark moves that should not have associated obsmarkers.
896
896
897 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
897 replacements is {oldnode: [newnode]} or a iterable of nodes if they do not
898 have replacements. operation is a string, like "rebase".
898 have replacements. operation is a string, like "rebase".
899
899
900 metadata is dictionary containing metadata to be stored in obsmarker if
900 metadata is dictionary containing metadata to be stored in obsmarker if
901 obsolescence is enabled.
901 obsolescence is enabled.
902 """
902 """
903 assert fixphase or targetphase is None
903 assert fixphase or targetphase is None
904 if not replacements and not moves:
904 if not replacements and not moves:
905 return
905 return
906
906
907 # translate mapping's other forms
907 # translate mapping's other forms
908 if not util.safehasattr(replacements, 'items'):
908 if not util.safehasattr(replacements, 'items'):
909 replacements = {(n,): () for n in replacements}
909 replacements = {(n,): () for n in replacements}
910 else:
910 else:
911 # upgrading non tuple "source" to tuple ones for BC
911 # upgrading non tuple "source" to tuple ones for BC
912 repls = {}
912 repls = {}
913 for key, value in replacements.items():
913 for key, value in replacements.items():
914 if not isinstance(key, tuple):
914 if not isinstance(key, tuple):
915 key = (key,)
915 key = (key,)
916 repls[key] = value
916 repls[key] = value
917 replacements = repls
917 replacements = repls
918
918
919 # Unfiltered repo is needed since nodes in replacements might be hidden.
919 # Unfiltered repo is needed since nodes in replacements might be hidden.
920 unfi = repo.unfiltered()
920 unfi = repo.unfiltered()
921
921
922 # Calculate bookmark movements
922 # Calculate bookmark movements
923 if moves is None:
923 if moves is None:
924 moves = {}
924 moves = {}
925 for oldnodes, newnodes in replacements.items():
925 for oldnodes, newnodes in replacements.items():
926 for oldnode in oldnodes:
926 for oldnode in oldnodes:
927 if oldnode in moves:
927 if oldnode in moves:
928 continue
928 continue
929 if len(newnodes) > 1:
929 if len(newnodes) > 1:
930 # usually a split, take the one with biggest rev number
930 # usually a split, take the one with biggest rev number
931 newnode = next(unfi.set('max(%ln)', newnodes)).node()
931 newnode = next(unfi.set('max(%ln)', newnodes)).node()
932 elif len(newnodes) == 0:
932 elif len(newnodes) == 0:
933 # move bookmark backwards
933 # move bookmark backwards
934 allreplaced = []
934 allreplaced = []
935 for rep in replacements:
935 for rep in replacements:
936 allreplaced.extend(rep)
936 allreplaced.extend(rep)
937 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
937 roots = list(unfi.set('max((::%n) - %ln)', oldnode,
938 allreplaced))
938 allreplaced))
939 if roots:
939 if roots:
940 newnode = roots[0].node()
940 newnode = roots[0].node()
941 else:
941 else:
942 newnode = nullid
942 newnode = nullid
943 else:
943 else:
944 newnode = newnodes[0]
944 newnode = newnodes[0]
945 moves[oldnode] = newnode
945 moves[oldnode] = newnode
946
946
947 allnewnodes = [n for ns in replacements.values() for n in ns]
947 allnewnodes = [n for ns in replacements.values() for n in ns]
948 toretract = {}
948 toretract = {}
949 toadvance = {}
949 toadvance = {}
950 if fixphase:
950 if fixphase:
951 precursors = {}
951 precursors = {}
952 for oldnodes, newnodes in replacements.items():
952 for oldnodes, newnodes in replacements.items():
953 for oldnode in oldnodes:
953 for oldnode in oldnodes:
954 for newnode in newnodes:
954 for newnode in newnodes:
955 precursors.setdefault(newnode, []).append(oldnode)
955 precursors.setdefault(newnode, []).append(oldnode)
956
956
957 allnewnodes.sort(key=lambda n: unfi[n].rev())
957 allnewnodes.sort(key=lambda n: unfi[n].rev())
958 newphases = {}
958 newphases = {}
959 def phase(ctx):
959 def phase(ctx):
960 return newphases.get(ctx.node(), ctx.phase())
960 return newphases.get(ctx.node(), ctx.phase())
961 for newnode in allnewnodes:
961 for newnode in allnewnodes:
962 ctx = unfi[newnode]
962 ctx = unfi[newnode]
963 parentphase = max(phase(p) for p in ctx.parents())
963 parentphase = max(phase(p) for p in ctx.parents())
964 if targetphase is None:
964 if targetphase is None:
965 oldphase = max(unfi[oldnode].phase()
965 oldphase = max(unfi[oldnode].phase()
966 for oldnode in precursors[newnode])
966 for oldnode in precursors[newnode])
967 newphase = max(oldphase, parentphase)
967 newphase = max(oldphase, parentphase)
968 else:
968 else:
969 newphase = max(targetphase, parentphase)
969 newphase = max(targetphase, parentphase)
970 newphases[newnode] = newphase
970 newphases[newnode] = newphase
971 if newphase > ctx.phase():
971 if newphase > ctx.phase():
972 toretract.setdefault(newphase, []).append(newnode)
972 toretract.setdefault(newphase, []).append(newnode)
973 elif newphase < ctx.phase():
973 elif newphase < ctx.phase():
974 toadvance.setdefault(newphase, []).append(newnode)
974 toadvance.setdefault(newphase, []).append(newnode)
975
975
976 with repo.transaction('cleanup') as tr:
976 with repo.transaction('cleanup') as tr:
977 # Move bookmarks
977 # Move bookmarks
978 bmarks = repo._bookmarks
978 bmarks = repo._bookmarks
979 bmarkchanges = []
979 bmarkchanges = []
980 for oldnode, newnode in moves.items():
980 for oldnode, newnode in moves.items():
981 oldbmarks = repo.nodebookmarks(oldnode)
981 oldbmarks = repo.nodebookmarks(oldnode)
982 if not oldbmarks:
982 if not oldbmarks:
983 continue
983 continue
984 from . import bookmarks # avoid import cycle
984 from . import bookmarks # avoid import cycle
985 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
985 repo.ui.debug('moving bookmarks %r from %s to %s\n' %
986 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
986 (pycompat.rapply(pycompat.maybebytestr, oldbmarks),
987 hex(oldnode), hex(newnode)))
987 hex(oldnode), hex(newnode)))
988 # Delete divergent bookmarks being parents of related newnodes
988 # Delete divergent bookmarks being parents of related newnodes
989 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
989 deleterevs = repo.revs('parents(roots(%ln & (::%n))) - parents(%n)',
990 allnewnodes, newnode, oldnode)
990 allnewnodes, newnode, oldnode)
991 deletenodes = _containsnode(repo, deleterevs)
991 deletenodes = _containsnode(repo, deleterevs)
992 for name in oldbmarks:
992 for name in oldbmarks:
993 bmarkchanges.append((name, newnode))
993 bmarkchanges.append((name, newnode))
994 for b in bookmarks.divergent2delete(repo, deletenodes, name):
994 for b in bookmarks.divergent2delete(repo, deletenodes, name):
995 bmarkchanges.append((b, None))
995 bmarkchanges.append((b, None))
996
996
997 if bmarkchanges:
997 if bmarkchanges:
998 bmarks.applychanges(repo, tr, bmarkchanges)
998 bmarks.applychanges(repo, tr, bmarkchanges)
999
999
1000 for phase, nodes in toretract.items():
1000 for phase, nodes in toretract.items():
1001 phases.retractboundary(repo, tr, phase, nodes)
1001 phases.retractboundary(repo, tr, phase, nodes)
1002 for phase, nodes in toadvance.items():
1002 for phase, nodes in toadvance.items():
1003 phases.advanceboundary(repo, tr, phase, nodes)
1003 phases.advanceboundary(repo, tr, phase, nodes)
1004
1004
1005 # Obsolete or strip nodes
1005 # Obsolete or strip nodes
1006 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1006 if obsolete.isenabled(repo, obsolete.createmarkersopt):
1007 # If a node is already obsoleted, and we want to obsolete it
1007 # If a node is already obsoleted, and we want to obsolete it
1008 # without a successor, skip that obssolete request since it's
1008 # without a successor, skip that obssolete request since it's
1009 # unnecessary. That's the "if s or not isobs(n)" check below.
1009 # unnecessary. That's the "if s or not isobs(n)" check below.
1010 # Also sort the node in topology order, that might be useful for
1010 # Also sort the node in topology order, that might be useful for
1011 # some obsstore logic.
1011 # some obsstore logic.
1012 # NOTE: the sorting might belong to createmarkers.
1012 # NOTE: the sorting might belong to createmarkers.
1013 torev = unfi.changelog.rev
1013 torev = unfi.changelog.rev
1014 sortfunc = lambda ns: torev(ns[0][0])
1014 sortfunc = lambda ns: torev(ns[0][0])
1015 rels = []
1015 rels = []
1016 for ns, s in sorted(replacements.items(), key=sortfunc):
1016 for ns, s in sorted(replacements.items(), key=sortfunc):
1017 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1017 rel = (tuple(unfi[n] for n in ns), tuple(unfi[m] for m in s))
1018 rels.append(rel)
1018 rels.append(rel)
1019 if rels:
1019 if rels:
1020 obsolete.createmarkers(repo, rels, operation=operation,
1020 obsolete.createmarkers(repo, rels, operation=operation,
1021 metadata=metadata)
1021 metadata=metadata)
1022 else:
1022 else:
1023 from . import repair # avoid import cycle
1023 from . import repair # avoid import cycle
1024 tostrip = list(n for ns in replacements for n in ns)
1024 tostrip = list(n for ns in replacements for n in ns)
1025 if tostrip:
1025 if tostrip:
1026 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1026 repair.delayedstrip(repo.ui, repo, tostrip, operation,
1027 backup=backup)
1027 backup=backup)
1028
1028
def addremove(repo, matcher, prefix, opts=None):
    """Add new files and forget missing files, recursing into subrepos.

    'prefix' is this repo's path relative to the outermost repo; it is
    used to build subrepo prefixes and user-facing messages. Returns 1
    if any path was rejected by the matcher or a subrepo reported a
    failure, otherwise 0.
    """
    if opts is None:
        opts = {}
    m = matcher
    dry_run = opts.get('dry_run')
    try:
        similarity = float(opts.get('similarity') or 0)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if similarity < 0 or similarity > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    # findrenames expects a 0..1 ratio, the UI option is a percentage
    similarity /= 100.0

    ret = 0
    join = lambda f: os.path.join(prefix, f)

    wctx = repo[None]
    for subpath in sorted(wctx.substate):
        submatch = matchmod.subdirmatcher(subpath, m)
        if opts.get('subrepos') or m.exact(subpath) or any(submatch.files()):
            sub = wctx.sub(subpath)
            # the subrepo now receives a prefix already extended with its
            # own path (API change in this changeset)
            subprefix = repo.wvfs.reljoin(prefix, subpath)
            try:
                if sub.addremove(submatch, subprefix, opts):
                    ret = 1
            except error.LookupError:
                repo.ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    rejected = []
    def badfn(f, msg):
        # only report files the user named explicitly; record all rejects
        if f in m.files():
            m.bad(f, msg)
        rejected.append(f)

    badmatch = matchmod.badmatch(m, badfn)
    added, unknown, deleted, removed, forgotten = _interestingfiles(repo,
                                                                   badmatch)

    # report what will be added (unknown/forgotten) and removed (deleted)
    unknownset = set(unknown + forgotten)
    toprint = unknownset.copy()
    toprint.update(deleted)
    for abs in sorted(toprint):
        if repo.ui.verbose or not m.exact(abs):
            if abs in unknownset:
                status = _('adding %s\n') % m.uipath(abs)
                label = 'ui.addremove.added'
            else:
                status = _('removing %s\n') % m.uipath(abs)
                label = 'ui.addremove.removed'
            repo.ui.status(status, label=label)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    if not dry_run:
        _markchanges(repo, unknown + forgotten, deleted, renames)

    # an explicitly-named path that the matcher rejected is an error
    for f in rejected:
        if f in m.files():
            return 1
    return ret
1090
1091
def marktouched(repo, files, similarity=0.0):
    '''Assert that files have somehow been operated upon. files are relative to
    the repo root.'''
    # Initialize the rejected list *before* building the matcher whose
    # badfn closure appends to it.  The previous ordering only worked via
    # the lambda's late name binding and was fragile under refactoring.
    rejected = []
    m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x))

    added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m)

    if repo.ui.verbose:
        # report files being added (unknown/forgotten) and removed (deleted)
        unknownset = set(unknown + forgotten)
        toprint = unknownset.copy()
        toprint.update(deleted)
        for abs in sorted(toprint):
            if abs in unknownset:
                status = _('adding %s\n') % abs
            else:
                status = _('removing %s\n') % abs
            repo.ui.status(status)

    renames = _findrenames(repo, m, added + unknown, removed + deleted,
                           similarity)

    _markchanges(repo, unknown + forgotten, deleted, renames)

    # a path rejected by the matcher is an error when explicitly named
    for f in rejected:
        if f in m.files():
            return 1
    return 0
1119
1120
def _interestingfiles(repo, matcher):
    '''Walk dirstate with matcher, looking for files that addremove would care
    about.

    This is different from dirstate.status because it doesn't care about
    whether files are modified or clean.

    Returns a 5-tuple of lists of repo-relative paths:
    (added, unknown, deleted, removed, forgotten).'''
    added, unknown, deleted, removed, forgotten = [], [], [], [], []
    # auditor rejects paths that escape the repo (symlinks, .hg, etc.)
    audit_path = pathutil.pathauditor(repo.root, cached=True)

    ctx = repo[None]
    dirstate = repo.dirstate
    # restrict to the narrowspec, but keep explicitly-listed files
    matcher = repo.narrowmatch(matcher, includeexact=True)
    walkresults = dirstate.walk(matcher, subrepos=sorted(ctx.substate),
                                unknown=True, ignored=False, full=False)
    for abs, st in walkresults.iteritems():
        # dirstate codes: '?' = untracked, 'r' = removed, 'a' = added;
        # st is the stat result, falsy when the file is gone from disk
        dstate = dirstate[abs]
        if dstate == '?' and audit_path.check(abs):
            unknown.append(abs)
        elif dstate != 'r' and not st:
            deleted.append(abs)
        elif dstate == 'r' and st:
            forgotten.append(abs)
        # for finding renames
        elif dstate == 'r' and not st:
            removed.append(abs)
        elif dstate == 'a':
            added.append(abs)

    return added, unknown, deleted, removed, forgotten
1149
1150
1150 def _findrenames(repo, matcher, added, removed, similarity):
1151 def _findrenames(repo, matcher, added, removed, similarity):
1151 '''Find renames from removed files to added ones.'''
1152 '''Find renames from removed files to added ones.'''
1152 renames = {}
1153 renames = {}
1153 if similarity > 0:
1154 if similarity > 0:
1154 for old, new, score in similar.findrenames(repo, added, removed,
1155 for old, new, score in similar.findrenames(repo, added, removed,
1155 similarity):
1156 similarity):
1156 if (repo.ui.verbose or not matcher.exact(old)
1157 if (repo.ui.verbose or not matcher.exact(old)
1157 or not matcher.exact(new)):
1158 or not matcher.exact(new)):
1158 repo.ui.status(_('recording removal of %s as rename to %s '
1159 repo.ui.status(_('recording removal of %s as rename to %s '
1159 '(%d%% similar)\n') %
1160 '(%d%% similar)\n') %
1160 (matcher.rel(old), matcher.rel(new),
1161 (matcher.rel(old), matcher.rel(new),
1161 score * 100))
1162 score * 100))
1162 renames[new] = old
1163 renames[new] = old
1163 return renames
1164 return renames
1164
1165
1165 def _markchanges(repo, unknown, deleted, renames):
1166 def _markchanges(repo, unknown, deleted, renames):
1166 '''Marks the files in unknown as added, the files in deleted as removed,
1167 '''Marks the files in unknown as added, the files in deleted as removed,
1167 and the files in renames as copied.'''
1168 and the files in renames as copied.'''
1168 wctx = repo[None]
1169 wctx = repo[None]
1169 with repo.wlock():
1170 with repo.wlock():
1170 wctx.forget(deleted)
1171 wctx.forget(deleted)
1171 wctx.add(unknown)
1172 wctx.add(unknown)
1172 for new, old in renames.iteritems():
1173 for new, old in renames.iteritems():
1173 wctx.copy(old, new)
1174 wctx.copy(old, new)
1174
1175
def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
    """Update the dirstate to reflect the intent of copying src to dst. For
    different reasons it might not end with dst being marked as copied from src.
    """
    # follow a previously-recorded copy back to its original source, so
    # chains of copies all point at the first ancestor
    origsrc = repo.dirstate.copied(src) or src
    if dst == origsrc: # copying back a copy?
        # 'm' = merged, 'n' = normal; anything else needs a state refresh
        if repo.dirstate[dst] not in 'mn' and not dryrun:
            repo.dirstate.normallookup(dst)
    else:
        if repo.dirstate[origsrc] == 'a' and origsrc == src:
            # source is uncommitted ('a' = added): copy metadata would be
            # meaningless, so just make sure dst is tracked
            if not ui.quiet:
                ui.warn(_("%s has not been committed yet, so no copy "
                          "data will be stored for %s.\n")
                        % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
            if repo.dirstate[dst] in '?r' and not dryrun:
                wctx.add([dst])
        elif not dryrun:
            wctx.copy(origsrc, dst)
1193
1194
def writerequires(opener, requirements):
    """Atomically write the repo requirements, sorted one per line, to
    the 'requires' file via the given opener."""
    with opener('requires', 'w', atomictemp=True) as fp:
        lines = ["%s\n" % req for req in sorted(requirements)]
        for line in lines:
            fp.write(line)
1198
1199
class filecachesubentry(object):
    """Tracks the stat state of a single file for cache invalidation."""

    def __init__(self, path, stat):
        self.path = path
        self.cachestat = None
        # tri-state: True / False / None (= not yet known)
        self._cacheable = None

        if stat:
            self.cachestat = filecachesubentry.stat(self.path)
            # a failed stat leaves cacheability unknown (None)
            self._cacheable = (self.cachestat.cacheable()
                               if self.cachestat else None)

    def refresh(self):
        """Re-stat the file, unless it is known to be uncacheable."""
        if self.cacheable():
            self.cachestat = filecachesubentry.stat(self.path)

    def cacheable(self):
        """Whether the file can be cached; an unknown state counts as yes."""
        if self._cacheable is None:
            return True
        return self._cacheable

    def changed(self):
        """True if the file changed since the last refresh, or if it
        cannot be cached at all."""
        if not self.cacheable():
            return True

        newstat = filecachesubentry.stat(self.path)

        # a successful stat lets us settle a previously unknown state
        if newstat and self._cacheable is None:
            self._cacheable = newstat.cacheable()

        if not self._cacheable:
            return True

        if self.cachestat == newstat:
            return False
        self.cachestat = newstat
        return True

    @staticmethod
    def stat(path):
        """Return util.cachestat for path, or None if the file is missing."""
        try:
            return util.cachestat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
1253
1254
class filecacheentry(object):
    """Aggregates one filecachesubentry per tracked path."""

    def __init__(self, paths, stat=True):
        self._entries = [filecachesubentry(p, stat) for p in paths]

    def changed(self):
        '''true if any entry has changed'''
        return any(sub.changed() for sub in self._entries)

    def refresh(self):
        """Refresh every tracked path."""
        for sub in self._entries:
            sub.refresh()
1270
1271
class filecache(object):
    """A property like decorator that tracks files under .hg/ for updates.

    On first access, the files defined as arguments are stat()ed and the
    results cached. The decorated function is called. The results are stashed
    away in a ``_filecache`` dict on the object whose method is decorated.

    On subsequent access, the cached result is used as it is set to the
    instance dictionary.

    On external property set/delete operations, the caller must update the
    corresponding _filecache entry appropriately. Use __class__.<attr>.set()
    instead of directly setting <attr>.

    When using the property API, the cached data is always used if available.
    No stat() is performed to check if the file has changed.

    Others can muck about with the state of the ``_filecache`` dict. e.g. they
    can populate an entry before the property's getter is called. In this case,
    entries in ``_filecache`` will be used during property operations,
    if available. If the underlying file changes, it is up to external callers
    to reflect this by e.g. calling ``delattr(obj, attr)`` to remove the cached
    method result as well as possibly calling ``del obj._filecache[attr]`` to
    remove the ``filecacheentry``.
    """

    def __init__(self, *paths):
        self.paths = paths

    def join(self, obj, fname):
        """Used to compute the runtime path of a cached file.

        Users should subclass filecache and provide their own version of this
        function to call the appropriate join function on 'obj' (an instance
        of the class that its member function was decorated).
        """
        raise NotImplementedError

    def __call__(self, func):
        # record both the native-str and the bytes name of the decorated
        # function: __dict__ keys are native str, _filecache keys are bytes
        self.func = func
        self.sname = func.__name__
        self.name = pycompat.sysbytes(self.sname)
        return self

    def __get__(self, obj, type=None):
        # if accessed on the class, return the descriptor itself.
        if obj is None:
            return self

        # once the value lands in obj.__dict__ this descriptor should no
        # longer be consulted for this attribute
        assert self.sname not in obj.__dict__

        entry = obj._filecache.get(self.name)

        if entry:
            if entry.changed():
                entry.obj = self.func(obj)
        else:
            paths = [self.join(obj, path) for path in self.paths]

            # We stat -before- creating the object so our cache doesn't lie if
            # a writer modified between the time we read and stat
            entry = filecacheentry(paths, True)
            entry.obj = self.func(obj)

            obj._filecache[self.name] = entry

        obj.__dict__[self.sname] = entry.obj
        return entry.obj

    # don't implement __set__(), which would make __dict__ lookup as slow as
    # function call.

    def set(self, obj, value):
        # explicit setter used instead of __set__; see comment above
        if self.name not in obj._filecache:
            # we add an entry for the missing value because X in __dict__
            # implies X in _filecache
            paths = [self.join(obj, path) for path in self.paths]
            ce = filecacheentry(paths, False)
            obj._filecache[self.name] = ce
        else:
            ce = obj._filecache[self.name]

        ce.obj = value # update cached copy
        obj.__dict__[self.sname] = value # update copy returned by obj.x
1355
1356
def extdatasource(repo, source):
    """Gather a map of rev -> value dict from the specified source

    A source spec is treated as a URL, with a special case shell: type
    for parsing the output from a shell command.

    The data is parsed as a series of newline-separated records where
    each record is a revision specifier optionally followed by a space
    and a freeform string value. If the revision is known locally, it
    is converted to a rev, otherwise the record is skipped.

    Note that both key and value are treated as UTF-8 and converted to
    the local encoding. This allows uniformity between local and
    remote data sources.
    """

    spec = repo.ui.config("extdata", source)
    if not spec:
        raise error.Abort(_("unknown extdata source '%s'") % source)

    data = {}
    src = proc = None
    try:
        if spec.startswith("shell:"):
            # external commands should be run relative to the repo root
            cmd = spec[6:]
            proc = subprocess.Popen(procutil.tonativestr(cmd),
                                    shell=True, bufsize=-1,
                                    close_fds=procutil.closefds,
                                    stdout=subprocess.PIPE,
                                    cwd=procutil.tonativestr(repo.root))
            src = proc.stdout
        else:
            # treat as a URL or file
            src = url.open(repo.ui, spec)
        for l in src:
            # each record is "<revspec>[ <value>]"
            if " " in l:
                k, v = l.strip().split(" ", 1)
            else:
                k, v = l.strip(), ""

            k = encoding.tolocal(k)
            try:
                data[revsingle(repo, k).rev()] = encoding.tolocal(v)
            except (error.LookupError, error.RepoLookupError):
                pass # we ignore data for nodes that don't exist locally
    finally:
        # always reap the child process and close the stream, even on error
        if proc:
            proc.communicate()
        if src:
            src.close()
    if proc and proc.returncode != 0:
        raise error.Abort(_("extdata command '%s' failed: %s")
                          % (cmd, procutil.explainexit(proc.returncode)))

    return data
1412
1413
1413 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1414 def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs):
1414 if lock is None:
1415 if lock is None:
1415 raise error.LockInheritanceContractViolation(
1416 raise error.LockInheritanceContractViolation(
1416 'lock can only be inherited while held')
1417 'lock can only be inherited while held')
1417 if environ is None:
1418 if environ is None:
1418 environ = {}
1419 environ = {}
1419 with lock.inherit() as locker:
1420 with lock.inherit() as locker:
1420 environ[envvar] = locker
1421 environ[envvar] = locker
1421 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1422 return repo.ui.system(cmd, environ=environ, *args, **kwargs)
1422
1423
def wlocksub(repo, cmd, *args, **kwargs):
    """run cmd as a subprocess that allows inheriting repo's wlock

    This can only be called while the wlock is held. This takes all the
    arguments that ui.system does, and returns the exit code of the
    subprocess."""
    wlock = repo.currentwlock()
    return _locksub(repo, wlock, 'HG_WLOCK_LOCKER', cmd, *args, **kwargs)
1431
1432
class progress(object):
    """Track a position/total pair and forward it to an 'updatebar'
    callback, optionally echoing each update to ui.debug.

    Usable as a context manager; the bar is completed (pos=None) on
    exit."""

    def __init__(self, ui, updatebar, topic, unit="", total=None):
        self.ui = ui
        self.pos = 0
        self.topic = topic
        self.unit = unit
        self.total = total
        self.debug = ui.configbool('progress', 'debug')
        self._updatebar = updatebar

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.complete()

    def update(self, pos, item="", total=None):
        """Move the bar to 'pos', optionally updating 'total'."""
        assert pos is not None
        if total:
            self.total = total
        self.pos = pos
        self._updatebar(self.topic, self.pos, item, self.unit, self.total)
        if self.debug:
            self._printdebug(item)

    def increment(self, step=1, item="", total=None):
        """Advance the bar by 'step' positions."""
        self.update(self.pos + step, item, total)

    def complete(self):
        """Signal completion by reporting a None position."""
        self.pos = None
        self.unit = ""
        self.total = None
        self._updatebar(self.topic, self.pos, "", self.unit, self.total)

    def _printdebug(self, item):
        # Fix: 'unit' was previously only bound when self.unit was
        # non-empty, raising NameError on the debug path for unit-less
        # bars; initialize it to the empty decoration first.
        unit = ''
        if self.unit:
            unit = ' ' + self.unit
        if item:
            item = ' ' + item

        if self.total:
            pct = 100.0 * self.pos / self.total
            self.ui.debug('%s:%s %d/%d%s (%4.2f%%)\n'
                          % (self.topic, item, self.pos, self.total, unit, pct))
        else:
            self.ui.debug('%s:%s %d%s\n' % (self.topic, item, self.pos, unit))
1478
1479
def gdinitconfig(ui):
    """helper function to know if a repo should be created as general delta
    """
    # experimental config: format.generaldelta
    # preserve short-circuit: only consult the fallback knob when the
    # primary one is off
    if ui.configbool('format', 'generaldelta'):
        return True
    return ui.configbool('format', 'usegeneraldelta')
1485
1486
def gddeltaconfig(ui):
    """helper function to know if incoming delta should be optimised
    """
    # experimental config: format.generaldelta
    optimise = ui.configbool('format', 'generaldelta')
    return optimise
1491
1492
class simplekeyvaluefile(object):
    """A simple file with key=value lines

    Keys must be alphanumerics and start with a letter, values must not
    contain '\n' characters"""
    # reserved key under which read() can expose a free-form first line
    firstlinekey = '__firstline'

    def __init__(self, vfs, path, keys=None):
        # 'keys' is accepted for backward compatibility with existing
        # callers but is unused.
        self.vfs = vfs
        self.path = path

    def read(self, firstlinenonkeyval=False):
        """Read the contents of a simple key-value file

        'firstlinenonkeyval' indicates whether the first line of file should
        be treated as a key-value pair or returned fully under the
        __firstline key.

        Raises error.CorruptedState on an empty file (when a first line is
        expected), on a malformed line, or if a line tries to use the
        reserved __firstline key.
        """
        lines = self.vfs.readlines(self.path)
        d = {}
        if firstlinenonkeyval:
            if not lines:
                e = _("empty simplekeyvalue file")
                raise error.CorruptedState(e)
            # we don't want to include '\n' in the __firstline
            d[self.firstlinekey] = lines[0][:-1]
            del lines[0]

        try:
            # the 'if line.strip()' part prevents us from failing on empty
            # lines which only contain '\n' therefore are not skipped
            # by 'if line'
            updatedict = dict(line[:-1].split('=', 1) for line in lines
                              if line.strip())
            if self.firstlinekey in updatedict:
                e = _("%r can't be used as a key")
                raise error.CorruptedState(e % self.firstlinekey)
            d.update(updatedict)
        except ValueError as e:
            # a line without '=' makes the dict() constructor blow up
            raise error.CorruptedState(str(e))
        return d

    def write(self, data, firstline=None):
        """Write key=>value mapping to a file
        data is a dict. Keys must be alphanumerical and start with a letter.
        Values must not contain newline characters.

        If 'firstline' is not None, it is written to file before
        everything else, as it is, not in a key=value form"""
        lines = []
        if firstline is not None:
            lines.append('%s\n' % firstline)

        for k, v in data.items():
            if k == self.firstlinekey:
                e = "key name '%s' is reserved" % self.firstlinekey
                raise error.ProgrammingError(e)
            if not k[0:1].isalpha():
                e = "keys must start with a letter in a key-value file"
                raise error.ProgrammingError(e)
            if not k.isalnum():
                e = "invalid key name in a simple key-value file"
                raise error.ProgrammingError(e)
            if '\n' in v:
                e = "invalid value in a simple key-value file"
                raise error.ProgrammingError(e)
            lines.append("%s=%s\n" % (k, v))
        # atomictemp makes the write all-or-nothing
        with self.vfs(self.path, mode='wb', atomictemp=True) as fp:
            fp.write(''.join(lines))
1560
1561
# Transaction names (matched by prefix) after which a summary of obsoleted
# changesets should be reported.
_reportobsoletedsource = [
    'debugobsolete',
    'pull',
    'push',
    'serve',
    'unbundle',
]

# Transaction names (matched by prefix) after which incoming changesets
# should be reported.
_reportnewcssource = [
    'pull',
    'unbundle',
]
1573
1574
def prefetchfiles(repo, revs, match):
    """Invokes the registered file prefetch functions, allowing extensions to
    ensure the corresponding files are available locally, before the command
    uses them."""
    if not match:
        match = matchall(repo)
    else:
        # The command itself will complain about files that don't exist, so
        # don't duplicate the message.
        match = matchmod.badmatch(match, lambda fn, msg: None)

    fileprefetchhooks(repo, revs, match)
1586
1587
# a list of (repo, revs, match) prefetch functions; extensions register
# callbacks here and prefetchfiles() invokes them all
fileprefetchhooks = util.hooks()

# A marker that tells the evolve extension to suppress its own reporting
_reportstroubledchangesets = True
1592
1593
def registersummarycallback(repo, otr, txnname=''):
    """register a callback to issue a summary after the transaction is closed
    """
    def txmatch(sources):
        # does the transaction name start with any of the given prefixes?
        return any(txnname.startswith(source) for source in sources)

    categories = []

    def reportsummary(func):
        """decorator for report callbacks."""
        # The repoview life cycle is shorter than the one of the actual
        # underlying repository. So the filtered object can die before the
        # weakref is used leading to troubles. We keep a reference to the
        # unfiltered object and restore the filtering when retrieving the
        # repository through the weakref.
        filtername = repo.filtername
        reporef = weakref.ref(repo.unfiltered())
        def wrapped(tr):
            repo = reporef()
            if filtername:
                repo = repo.filtered(filtername)
            func(repo, tr)
        # category names are numbered so callbacks run in registration order
        newcat = '%02i-txnreport' % len(categories)
        otr.addpostclose(newcat, wrapped)
        categories.append(newcat)
        return wrapped

    if txmatch(_reportobsoletedsource):
        @reportsummary
        def reportobsoleted(repo, tr):
            obsoleted = obsutil.getobsoleted(repo, tr)
            if obsoleted:
                repo.ui.status(_('obsoleted %i changesets\n')
                               % len(obsoleted))

    if (obsolete.isenabled(repo, obsolete.createmarkersopt) and
        repo.ui.configbool('experimental', 'evolution.report-instabilities')):
        instabilitytypes = [
            ('orphan', 'orphan'),
            ('phase-divergent', 'phasedivergent'),
            ('content-divergent', 'contentdivergent'),
        ]

        def getinstabilitycounts(repo):
            # count unstable revisions of each type, ignoring filtered revs
            filtered = repo.changelog.filteredrevs
            counts = {}
            for instability, revset in instabilitytypes:
                counts[instability] = len(set(obsolete.getrevs(repo, revset)) -
                                          filtered)
            return counts

        # snapshot the counts now so the callback can report the delta
        oldinstabilitycounts = getinstabilitycounts(repo)
        @reportsummary
        def reportnewinstabilities(repo, tr):
            newinstabilitycounts = getinstabilitycounts(repo)
            for instability, revset in instabilitytypes:
                delta = (newinstabilitycounts[instability] -
                         oldinstabilitycounts[instability])
                msg = getinstabilitymessage(delta, instability)
                if msg:
                    repo.ui.warn(msg)

    if txmatch(_reportnewcssource):
        @reportsummary
        def reportnewcs(repo, tr):
            """Report the range of new revisions pulled/unbundled."""
            origrepolen = tr.changes.get('origrepolen', len(repo))
            unfi = repo.unfiltered()
            if origrepolen >= len(unfi):
                return

            # Compute the bounds of new visible revisions' range.
            revs = smartset.spanset(repo, start=origrepolen)
            if revs:
                minrev, maxrev = repo[revs.min()], repo[revs.max()]

                if minrev == maxrev:
                    revrange = minrev
                else:
                    revrange = '%s:%s' % (minrev, maxrev)
                draft = len(repo.revs('%ld and draft()', revs))
                secret = len(repo.revs('%ld and secret()', revs))
                if not (draft or secret):
                    msg = _('new changesets %s\n') % revrange
                elif draft and secret:
                    msg = _('new changesets %s (%d drafts, %d secrets)\n')
                    msg %= (revrange, draft, secret)
                elif draft:
                    msg = _('new changesets %s (%d drafts)\n')
                    msg %= (revrange, draft)
                elif secret:
                    msg = _('new changesets %s (%d secrets)\n')
                    msg %= (revrange, secret)
                else:
                    errormsg = 'entered unreachable condition'
                    raise error.ProgrammingError(errormsg)
                repo.ui.status(msg)

            # search new changesets directly pulled as obsolete
            duplicates = tr.changes.get('revduplicates', ())
            obsadded = unfi.revs('(%d: + %ld) and obsolete()',
                                 origrepolen, duplicates)
            cl = repo.changelog
            extinctadded = [r for r in obsadded if r not in cl]
            if extinctadded:
                # They are not just obsolete, but obsolete and invisible
                # we call them "extinct" internally but the terms have not been
                # exposed to users.
                msg = '(%d other changesets obsolete on arrival)\n'
                repo.ui.status(msg % len(extinctadded))

        @reportsummary
        def reportphasechanges(repo, tr):
            """Report statistics of phase changes for changesets pre-existing
            pull/unbundle.
            """
            origrepolen = tr.changes.get('origrepolen', len(repo))
            phasetracking = tr.changes.get('phases', {})
            if not phasetracking:
                return
            # only count revisions that existed before the transaction and
            # were moved to the public phase
            published = [
                rev for rev, (old, new) in phasetracking.iteritems()
                if new == phases.public and rev < origrepolen
            ]
            if not published:
                return
            repo.ui.status(_('%d local changesets published\n')
                           % len(published))
1721
1722
def getinstabilitymessage(delta, instability):
    """function to return the message to show warning about new instabilities

    exists as a separate function so that extension can wrap to show more
    information like how to fix instabilities"""
    # no message (None) when nothing new appeared
    if delta <= 0:
        return None
    return _('%i new %s changesets\n') % (delta, instability)
1729
1730
def nodesummaries(repo, nodes, maxnumnodes=4):
    """Return a one-line summary of 'nodes' as short hashes.

    In verbose mode, or when the list is small enough, every node is shown;
    otherwise the first 'maxnumnodes' are shown with a count of the rest."""
    if repo.ui.verbose or len(nodes) <= maxnumnodes:
        return ' '.join(short(h) for h in nodes)
    shown = ' '.join(short(h) for h in nodes[:maxnumnodes])
    return _("%s and %d others") % (shown, len(nodes) - maxnumnodes)
1735
1736
def enforcesinglehead(repo, tr, desc):
    """check that no named branch has multiple heads

    Raises error.Abort listing the offending heads if any branch has more
    than one."""
    if desc in ('strip', 'repair'):
        # skip the logic during strip
        return
    visible = repo.filtered('visible')
    # possible improvement: we could restrict the check to affected branch
    for name, heads in visible.branchmap().iteritems():
        if len(heads) > 1:
            msg = _('rejecting multiple heads on branch "%s"')
            msg %= name
            hint = _('%d heads: %s')
            hint %= (len(heads), nodesummaries(repo, heads))
            raise error.Abort(msg, hint=hint)
1750
1751
def wrapconvertsink(sink):
    """Allow extensions to wrap the sink returned by convcmd.convertsink()
    before it is used, whether or not the convert extension was formally loaded.
    """
    # default implementation is the identity; extensions replace/wrap this
    wrapped = sink
    return wrapped
1756
1757
def unhidehashlikerevs(repo, specs, hiddentype):
    """parse the user specs and unhide changesets whose hash or revision number
    is passed.

    hiddentype can be: 1) 'warn': warn while unhiding changesets
                       2) 'nowarn': don't warn while unhiding changesets

    returns a repo object with the required changesets unhidden
    """
    # direct access is opt-in and only meaningful on a filtered repo
    if not repo.filtername or not repo.ui.configbool('experimental',
                                                     'directaccess'):
        return repo

    if repo.filtername not in ('visible', 'visible-hidden'):
        return repo

    # collect every hash-like symbol appearing in the user-provided revsets
    symbols = set()
    for spec in specs:
        try:
            tree = revsetlang.parse(spec)
        except error.ParseError: # will be reported by scmutil.revrange()
            continue

        symbols.update(revsetlang.gethashlikesymbols(tree))

    if not symbols:
        return repo

    revs = _getrevsfromsymbols(repo, symbols)

    if not revs:
        return repo

    if hiddentype == 'warn':
        unfi = repo.unfiltered()
        revstr = ", ".join([pycompat.bytestr(unfi[l]) for l in revs])
        repo.ui.warn(_("warning: accessing hidden changesets for write "
                       "operation: %s\n") % revstr)

    # we have to use new filtername to separate branch/tags cache until we can
    # disable these caches when revisions are dynamically pinned.
    return repo.filtered('visible-hidden', revs)
1799
1800
def _getrevsfromsymbols(repo, symbols):
    """parse the list of symbols and returns a set of revision numbers of hidden
    changesets present in symbols"""
    revs = set()
    unfi = repo.unfiltered()
    unficl = unfi.changelog
    cl = repo.changelog
    tiprev = len(unficl)
    allowrevnums = repo.ui.configbool('experimental', 'directaccess.revnums')
    for s in symbols:
        # first, try to interpret the symbol as a plain revision number
        try:
            n = int(s)
            if n <= tiprev:
                if not allowrevnums:
                    # numeric direct access disabled: drop this symbol
                    continue
                else:
                    if n not in cl:
                        # valid unfiltered rev that is hidden in 'repo'
                        revs.add(n)
                    continue
        except ValueError:
            pass

        # otherwise, treat the symbol as a hex node-id prefix
        try:
            s = resolvehexnodeidprefix(unfi, s)
        except (error.LookupError, error.WdirUnsupported):
            s = None

        if s is not None:
            rev = unficl.rev(s)
            if rev not in cl:
                # known to the unfiltered repo but hidden in 'repo'
                revs.add(rev)

    return revs
1833
1834
def bookmarkrevs(repo, mark):
    """
    Select revisions reachable by a given bookmark

    Ancestors of other heads and of other bookmarks are excluded, so the
    result is the line of history "belonging" to this bookmark.
    """
    return repo.revs("ancestors(bookmark(%s)) - "
                     "ancestors(head() and not bookmark(%s)) - "
                     "ancestors(bookmark() and not bookmark(%s))",
                     mark, mark, mark)
@@ -1,1844 +1,1843 b''
1 # subrepo.py - sub-repository classes and factory
1 # subrepo.py - sub-repository classes and factory
2 #
2 #
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
3 # Copyright 2009-2010 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import copy
10 import copy
11 import errno
11 import errno
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import posixpath
14 import posixpath
15 import re
15 import re
16 import stat
16 import stat
17 import subprocess
17 import subprocess
18 import sys
18 import sys
19 import tarfile
19 import tarfile
20 import xml.dom.minidom
20 import xml.dom.minidom
21
21
22 from .i18n import _
22 from .i18n import _
23 from . import (
23 from . import (
24 cmdutil,
24 cmdutil,
25 encoding,
25 encoding,
26 error,
26 error,
27 exchange,
27 exchange,
28 logcmdutil,
28 logcmdutil,
29 match as matchmod,
29 match as matchmod,
30 node,
30 node,
31 pathutil,
31 pathutil,
32 phases,
32 phases,
33 pycompat,
33 pycompat,
34 scmutil,
34 scmutil,
35 subrepoutil,
35 subrepoutil,
36 util,
36 util,
37 vfs as vfsmod,
37 vfs as vfsmod,
38 )
38 )
39 from .utils import (
39 from .utils import (
40 dateutil,
40 dateutil,
41 procutil,
41 procutil,
42 stringutil,
42 stringutil,
43 )
43 )
44
44
# 'hg' is set lazily by the code that constructs subrepos, to avoid an
# import cycle with the hg module.
hg = None

# convenience aliases re-exported from subrepoutil / util
reporelpath = subrepoutil.reporelpath
subrelpath = subrepoutil.subrelpath
_abssource = subrepoutil._abssource
propertycache = util.propertycache
50
50
def _expandedabspath(path):
    '''
    get a path or url and if it is a path expand it and return an absolute path
    '''
    expandedpath = util.urllocalpath(util.expandpath(path))
    u = util.url(expandedpath)
    if u.scheme:
        # a real URL: return the original string untouched
        return path
    return util.normpath(os.path.abspath(u.path))
60
60
def _getstorehashcachename(remotepath):
    '''get a unique filename for the store hash cache of a remote repository'''
    # 12 hex digits of the SHA-1 of the normalized path keep the name short
    # while remaining effectively unique per remote
    return node.hex(hashlib.sha1(_expandedabspath(remotepath)).digest())[0:12]
64
64
class SubrepoAbort(error.Abort):
    """Exception class used to avoid handling a subrepo error more than once"""
    def __init__(self, *args, **kw):
        # r'' prefixes keep the kwarg names native strings on py2 and py3
        self.subrepo = kw.pop(r'subrepo', None)    # path of the failing subrepo
        self.cause = kw.pop(r'cause', None)        # original sys.exc_info()
        error.Abort.__init__(self, *args, **kw)
71
71
def annotatesubrepoerror(func):
    """Decorator for subrepo methods: annotate any error.Abort raised by
    'func' with the subrepo path, wrapping it in SubrepoAbort so outer
    frames do not annotate it again."""
    def decoratedmethod(self, *args, **kargs):
        try:
            res = func(self, *args, **kargs)
        except SubrepoAbort as ex:
            # This exception has already been handled
            raise ex
        except error.Abort as ex:
            subrepo = subrelpath(self)
            errormsg = (stringutil.forcebytestr(ex) + ' '
                        + _('(in subrepository "%s")') % subrepo)
            # avoid handling this exception by raising a SubrepoAbort exception
            raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo,
                               cause=sys.exc_info())
        return res
    return decoratedmethod
88
88
def _updateprompt(ui, sub, dirty, local, remote):
    """Ask the user whether to keep the (l)ocal or take the (r)emote subrepo
    source.

    Returns the promptchoice index: 0 for local, 1 for remote."""
    if dirty:
        msg = (_(' subrepository sources for %s differ\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    else:
        msg = (_(' subrepository sources for %s differ (in checked out '
                 'version)\n'
                 'use (l)ocal source (%s) or (r)emote source (%s)?'
                 '$$ &Local $$ &Remote')
               % (subrelpath(sub), local, remote))
    return ui.promptchoice(msg, 0)
102
102
def _sanitize(ui, vfs, ignore):
    """Remove potentially hostile 'hgrc' files found under any '.hg'
    directory in the given vfs, skipping the 'ignore' directory."""
    for dirname, dirs, names in vfs.walk():
        for i, d in enumerate(dirs):
            if d.lower() == ignore:
                # prune the ignored directory from the walk
                del dirs[i]
                break
        if vfs.basename(dirname).lower() != '.hg':
            continue
        for f in names:
            if f.lower() == 'hgrc':
                ui.warn(_("warning: removing potentially hostile 'hgrc' "
                          "in '%s'\n") % vfs.join(dirname))
                vfs.unlink(vfs.reljoin(dirname, f))
116
116
def _auditsubrepopath(repo, path):
    """Abort if 'path' is not a safe location for a subrepo inside 'repo'."""
    # sanity check for potentially unsafe paths such as '~' and '$FOO'
    if path.startswith('~') or '$' in path or util.expandpath(path) != path:
        raise error.Abort(_('subrepo path contains illegal component: %s')
                          % path)
    # auditor doesn't check if the path itself is a symlink
    pathutil.pathauditor(repo.root)(path)
    if repo.wvfs.islink(path):
        raise error.Abort(_("subrepo '%s' traverses symbolic link") % path)
126
126
127 SUBREPO_ALLOWED_DEFAULTS = {
127 SUBREPO_ALLOWED_DEFAULTS = {
128 'hg': True,
128 'hg': True,
129 'git': False,
129 'git': False,
130 'svn': False,
130 'svn': False,
131 }
131 }
132
132
133 def _checktype(ui, kind):
133 def _checktype(ui, kind):
134 # subrepos.allowed is a master kill switch. If disabled, subrepos are
134 # subrepos.allowed is a master kill switch. If disabled, subrepos are
135 # disabled period.
135 # disabled period.
136 if not ui.configbool('subrepos', 'allowed', True):
136 if not ui.configbool('subrepos', 'allowed', True):
137 raise error.Abort(_('subrepos not enabled'),
137 raise error.Abort(_('subrepos not enabled'),
138 hint=_("see 'hg help config.subrepos' for details"))
138 hint=_("see 'hg help config.subrepos' for details"))
139
139
140 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
140 default = SUBREPO_ALLOWED_DEFAULTS.get(kind, False)
141 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
141 if not ui.configbool('subrepos', '%s:allowed' % kind, default):
142 raise error.Abort(_('%s subrepos not allowed') % kind,
142 raise error.Abort(_('%s subrepos not allowed') % kind,
143 hint=_("see 'hg help config.subrepos' for details"))
143 hint=_("see 'hg help config.subrepos' for details"))
144
144
145 if kind not in types:
145 if kind not in types:
146 raise error.Abort(_('unknown subrepo type %s') % kind)
146 raise error.Abort(_('unknown subrepo type %s') % kind)
147
147
148 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
148 def subrepo(ctx, path, allowwdir=False, allowcreate=True):
149 """return instance of the right subrepo class for subrepo in path"""
149 """return instance of the right subrepo class for subrepo in path"""
150 # subrepo inherently violates our import layering rules
150 # subrepo inherently violates our import layering rules
151 # because it wants to make repo objects from deep inside the stack
151 # because it wants to make repo objects from deep inside the stack
152 # so we manually delay the circular imports to not break
152 # so we manually delay the circular imports to not break
153 # scripts that don't use our demand-loading
153 # scripts that don't use our demand-loading
154 global hg
154 global hg
155 from . import hg as h
155 from . import hg as h
156 hg = h
156 hg = h
157
157
158 repo = ctx.repo()
158 repo = ctx.repo()
159 _auditsubrepopath(repo, path)
159 _auditsubrepopath(repo, path)
160 state = ctx.substate[path]
160 state = ctx.substate[path]
161 _checktype(repo.ui, state[2])
161 _checktype(repo.ui, state[2])
162 if allowwdir:
162 if allowwdir:
163 state = (state[0], ctx.subrev(path), state[2])
163 state = (state[0], ctx.subrev(path), state[2])
164 return types[state[2]](ctx, path, state[:2], allowcreate)
164 return types[state[2]](ctx, path, state[:2], allowcreate)
165
165
166 def nullsubrepo(ctx, path, pctx):
166 def nullsubrepo(ctx, path, pctx):
167 """return an empty subrepo in pctx for the extant subrepo in ctx"""
167 """return an empty subrepo in pctx for the extant subrepo in ctx"""
168 # subrepo inherently violates our import layering rules
168 # subrepo inherently violates our import layering rules
169 # because it wants to make repo objects from deep inside the stack
169 # because it wants to make repo objects from deep inside the stack
170 # so we manually delay the circular imports to not break
170 # so we manually delay the circular imports to not break
171 # scripts that don't use our demand-loading
171 # scripts that don't use our demand-loading
172 global hg
172 global hg
173 from . import hg as h
173 from . import hg as h
174 hg = h
174 hg = h
175
175
176 repo = ctx.repo()
176 repo = ctx.repo()
177 _auditsubrepopath(repo, path)
177 _auditsubrepopath(repo, path)
178 state = ctx.substate[path]
178 state = ctx.substate[path]
179 _checktype(repo.ui, state[2])
179 _checktype(repo.ui, state[2])
180 subrev = ''
180 subrev = ''
181 if state[2] == 'hg':
181 if state[2] == 'hg':
182 subrev = "0" * 40
182 subrev = "0" * 40
183 return types[state[2]](pctx, path, (state[0], subrev), True)
183 return types[state[2]](pctx, path, (state[0], subrev), True)
184
184
185 # subrepo classes need to implement the following abstract class:
185 # subrepo classes need to implement the following abstract class:
186
186
187 class abstractsubrepo(object):
187 class abstractsubrepo(object):
188
188
189 def __init__(self, ctx, path):
189 def __init__(self, ctx, path):
190 """Initialize abstractsubrepo part
190 """Initialize abstractsubrepo part
191
191
192 ``ctx`` is the context referring this subrepository in the
192 ``ctx`` is the context referring this subrepository in the
193 parent repository.
193 parent repository.
194
194
195 ``path`` is the path to this subrepository as seen from
195 ``path`` is the path to this subrepository as seen from
196 innermost repository.
196 innermost repository.
197 """
197 """
198 self.ui = ctx.repo().ui
198 self.ui = ctx.repo().ui
199 self._ctx = ctx
199 self._ctx = ctx
200 self._path = path
200 self._path = path
201
201
202 def addwebdirpath(self, serverpath, webconf):
202 def addwebdirpath(self, serverpath, webconf):
203 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
203 """Add the hgwebdir entries for this subrepo, and any of its subrepos.
204
204
205 ``serverpath`` is the path component of the URL for this repo.
205 ``serverpath`` is the path component of the URL for this repo.
206
206
207 ``webconf`` is the dictionary of hgwebdir entries.
207 ``webconf`` is the dictionary of hgwebdir entries.
208 """
208 """
209 pass
209 pass
210
210
211 def storeclean(self, path):
211 def storeclean(self, path):
212 """
212 """
213 returns true if the repository has not changed since it was last
213 returns true if the repository has not changed since it was last
214 cloned from or pushed to a given repository.
214 cloned from or pushed to a given repository.
215 """
215 """
216 return False
216 return False
217
217
218 def dirty(self, ignoreupdate=False, missing=False):
218 def dirty(self, ignoreupdate=False, missing=False):
219 """returns true if the dirstate of the subrepo is dirty or does not
219 """returns true if the dirstate of the subrepo is dirty or does not
220 match current stored state. If ignoreupdate is true, only check
220 match current stored state. If ignoreupdate is true, only check
221 whether the subrepo has uncommitted changes in its dirstate. If missing
221 whether the subrepo has uncommitted changes in its dirstate. If missing
222 is true, check for deleted files.
222 is true, check for deleted files.
223 """
223 """
224 raise NotImplementedError
224 raise NotImplementedError
225
225
226 def dirtyreason(self, ignoreupdate=False, missing=False):
226 def dirtyreason(self, ignoreupdate=False, missing=False):
227 """return reason string if it is ``dirty()``
227 """return reason string if it is ``dirty()``
228
228
229 Returned string should have enough information for the message
229 Returned string should have enough information for the message
230 of exception.
230 of exception.
231
231
232 This returns None, otherwise.
232 This returns None, otherwise.
233 """
233 """
234 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
234 if self.dirty(ignoreupdate=ignoreupdate, missing=missing):
235 return _('uncommitted changes in subrepository "%s"'
235 return _('uncommitted changes in subrepository "%s"'
236 ) % subrelpath(self)
236 ) % subrelpath(self)
237
237
238 def bailifchanged(self, ignoreupdate=False, hint=None):
238 def bailifchanged(self, ignoreupdate=False, hint=None):
239 """raise Abort if subrepository is ``dirty()``
239 """raise Abort if subrepository is ``dirty()``
240 """
240 """
241 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
241 dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate,
242 missing=True)
242 missing=True)
243 if dirtyreason:
243 if dirtyreason:
244 raise error.Abort(dirtyreason, hint=hint)
244 raise error.Abort(dirtyreason, hint=hint)
245
245
246 def basestate(self):
246 def basestate(self):
247 """current working directory base state, disregarding .hgsubstate
247 """current working directory base state, disregarding .hgsubstate
248 state and working directory modifications"""
248 state and working directory modifications"""
249 raise NotImplementedError
249 raise NotImplementedError
250
250
251 def checknested(self, path):
251 def checknested(self, path):
252 """check if path is a subrepository within this repository"""
252 """check if path is a subrepository within this repository"""
253 return False
253 return False
254
254
255 def commit(self, text, user, date):
255 def commit(self, text, user, date):
256 """commit the current changes to the subrepo with the given
256 """commit the current changes to the subrepo with the given
257 log message. Use given user and date if possible. Return the
257 log message. Use given user and date if possible. Return the
258 new state of the subrepo.
258 new state of the subrepo.
259 """
259 """
260 raise NotImplementedError
260 raise NotImplementedError
261
261
262 def phase(self, state):
262 def phase(self, state):
263 """returns phase of specified state in the subrepository.
263 """returns phase of specified state in the subrepository.
264 """
264 """
265 return phases.public
265 return phases.public
266
266
267 def remove(self):
267 def remove(self):
268 """remove the subrepo
268 """remove the subrepo
269
269
270 (should verify the dirstate is not dirty first)
270 (should verify the dirstate is not dirty first)
271 """
271 """
272 raise NotImplementedError
272 raise NotImplementedError
273
273
274 def get(self, state, overwrite=False):
274 def get(self, state, overwrite=False):
275 """run whatever commands are needed to put the subrepo into
275 """run whatever commands are needed to put the subrepo into
276 this state
276 this state
277 """
277 """
278 raise NotImplementedError
278 raise NotImplementedError
279
279
280 def merge(self, state):
280 def merge(self, state):
281 """merge currently-saved state with the new state."""
281 """merge currently-saved state with the new state."""
282 raise NotImplementedError
282 raise NotImplementedError
283
283
284 def push(self, opts):
284 def push(self, opts):
285 """perform whatever action is analogous to 'hg push'
285 """perform whatever action is analogous to 'hg push'
286
286
287 This may be a no-op on some systems.
287 This may be a no-op on some systems.
288 """
288 """
289 raise NotImplementedError
289 raise NotImplementedError
290
290
291 def add(self, ui, match, prefix, explicitonly, **opts):
291 def add(self, ui, match, prefix, explicitonly, **opts):
292 return []
292 return []
293
293
294 def addremove(self, matcher, prefix, opts):
294 def addremove(self, matcher, prefix, opts):
295 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
295 self.ui.warn("%s: %s" % (prefix, _("addremove is not supported")))
296 return 1
296 return 1
297
297
298 def cat(self, match, fm, fntemplate, prefix, **opts):
298 def cat(self, match, fm, fntemplate, prefix, **opts):
299 return 1
299 return 1
300
300
301 def status(self, rev2, **opts):
301 def status(self, rev2, **opts):
302 return scmutil.status([], [], [], [], [], [], [])
302 return scmutil.status([], [], [], [], [], [], [])
303
303
304 def diff(self, ui, diffopts, node2, match, prefix, **opts):
304 def diff(self, ui, diffopts, node2, match, prefix, **opts):
305 pass
305 pass
306
306
307 def outgoing(self, ui, dest, opts):
307 def outgoing(self, ui, dest, opts):
308 return 1
308 return 1
309
309
310 def incoming(self, ui, source, opts):
310 def incoming(self, ui, source, opts):
311 return 1
311 return 1
312
312
313 def files(self):
313 def files(self):
314 """return filename iterator"""
314 """return filename iterator"""
315 raise NotImplementedError
315 raise NotImplementedError
316
316
317 def filedata(self, name, decode):
317 def filedata(self, name, decode):
318 """return file data, optionally passed through repo decoders"""
318 """return file data, optionally passed through repo decoders"""
319 raise NotImplementedError
319 raise NotImplementedError
320
320
321 def fileflags(self, name):
321 def fileflags(self, name):
322 """return file flags"""
322 """return file flags"""
323 return ''
323 return ''
324
324
325 def matchfileset(self, expr, badfn=None):
325 def matchfileset(self, expr, badfn=None):
326 """Resolve the fileset expression for this repo"""
326 """Resolve the fileset expression for this repo"""
327 return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn)
327 return matchmod.nevermatcher(self.wvfs.base, '', badfn=badfn)
328
328
329 def printfiles(self, ui, m, fm, fmt, subrepos):
329 def printfiles(self, ui, m, fm, fmt, subrepos):
330 """handle the files command for this subrepo"""
330 """handle the files command for this subrepo"""
331 return 1
331 return 1
332
332
333 def archive(self, archiver, prefix, match=None, decode=True):
333 def archive(self, archiver, prefix, match=None, decode=True):
334 if match is not None:
334 if match is not None:
335 files = [f for f in self.files() if match(f)]
335 files = [f for f in self.files() if match(f)]
336 else:
336 else:
337 files = self.files()
337 files = self.files()
338 total = len(files)
338 total = len(files)
339 relpath = subrelpath(self)
339 relpath = subrelpath(self)
340 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
340 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
341 unit=_('files'), total=total)
341 unit=_('files'), total=total)
342 progress.update(0)
342 progress.update(0)
343 for name in files:
343 for name in files:
344 flags = self.fileflags(name)
344 flags = self.fileflags(name)
345 mode = 'x' in flags and 0o755 or 0o644
345 mode = 'x' in flags and 0o755 or 0o644
346 symlink = 'l' in flags
346 symlink = 'l' in flags
347 archiver.addfile(prefix + self._path + '/' + name,
347 archiver.addfile(prefix + self._path + '/' + name,
348 mode, symlink, self.filedata(name, decode))
348 mode, symlink, self.filedata(name, decode))
349 progress.increment()
349 progress.increment()
350 progress.complete()
350 progress.complete()
351 return total
351 return total
352
352
353 def walk(self, match):
353 def walk(self, match):
354 '''
354 '''
355 walk recursively through the directory tree, finding all files
355 walk recursively through the directory tree, finding all files
356 matched by the match function
356 matched by the match function
357 '''
357 '''
358
358
359 def forget(self, match, prefix, dryrun, interactive):
359 def forget(self, match, prefix, dryrun, interactive):
360 return ([], [])
360 return ([], [])
361
361
362 def removefiles(self, matcher, prefix, after, force, subrepos,
362 def removefiles(self, matcher, prefix, after, force, subrepos,
363 dryrun, warnings):
363 dryrun, warnings):
364 """remove the matched files from the subrepository and the filesystem,
364 """remove the matched files from the subrepository and the filesystem,
365 possibly by force and/or after the file has been removed from the
365 possibly by force and/or after the file has been removed from the
366 filesystem. Return 0 on success, 1 on any warning.
366 filesystem. Return 0 on success, 1 on any warning.
367 """
367 """
368 warnings.append(_("warning: removefiles not implemented (%s)")
368 warnings.append(_("warning: removefiles not implemented (%s)")
369 % self._path)
369 % self._path)
370 return 1
370 return 1
371
371
372 def revert(self, substate, *pats, **opts):
372 def revert(self, substate, *pats, **opts):
373 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
373 self.ui.warn(_('%s: reverting %s subrepos is unsupported\n') \
374 % (substate[0], substate[2]))
374 % (substate[0], substate[2]))
375 return []
375 return []
376
376
377 def shortid(self, revid):
377 def shortid(self, revid):
378 return revid
378 return revid
379
379
380 def unshare(self):
380 def unshare(self):
381 '''
381 '''
382 convert this repository from shared to normal storage.
382 convert this repository from shared to normal storage.
383 '''
383 '''
384
384
385 def verify(self):
385 def verify(self):
386 '''verify the integrity of the repository. Return 0 on success or
386 '''verify the integrity of the repository. Return 0 on success or
387 warning, 1 on any error.
387 warning, 1 on any error.
388 '''
388 '''
389 return 0
389 return 0
390
390
391 @propertycache
391 @propertycache
392 def wvfs(self):
392 def wvfs(self):
393 """return vfs to access the working directory of this subrepository
393 """return vfs to access the working directory of this subrepository
394 """
394 """
395 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
395 return vfsmod.vfs(self._ctx.repo().wvfs.join(self._path))
396
396
397 @propertycache
397 @propertycache
398 def _relpath(self):
398 def _relpath(self):
399 """return path to this subrepository as seen from outermost repository
399 """return path to this subrepository as seen from outermost repository
400 """
400 """
401 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
401 return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path)
402
402
403 class hgsubrepo(abstractsubrepo):
403 class hgsubrepo(abstractsubrepo):
404 def __init__(self, ctx, path, state, allowcreate):
404 def __init__(self, ctx, path, state, allowcreate):
405 super(hgsubrepo, self).__init__(ctx, path)
405 super(hgsubrepo, self).__init__(ctx, path)
406 self._state = state
406 self._state = state
407 r = ctx.repo()
407 r = ctx.repo()
408 root = r.wjoin(path)
408 root = r.wjoin(path)
409 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
409 create = allowcreate and not r.wvfs.exists('%s/.hg' % path)
410 # repository constructor does expand variables in path, which is
410 # repository constructor does expand variables in path, which is
411 # unsafe since subrepo path might come from untrusted source.
411 # unsafe since subrepo path might come from untrusted source.
412 if os.path.realpath(util.expandpath(root)) != root:
412 if os.path.realpath(util.expandpath(root)) != root:
413 raise error.Abort(_('subrepo path contains illegal component: %s')
413 raise error.Abort(_('subrepo path contains illegal component: %s')
414 % path)
414 % path)
415 self._repo = hg.repository(r.baseui, root, create=create)
415 self._repo = hg.repository(r.baseui, root, create=create)
416 if self._repo.root != root:
416 if self._repo.root != root:
417 raise error.ProgrammingError('failed to reject unsafe subrepo '
417 raise error.ProgrammingError('failed to reject unsafe subrepo '
418 'path: %s (expanded to %s)'
418 'path: %s (expanded to %s)'
419 % (root, self._repo.root))
419 % (root, self._repo.root))
420
420
421 # Propagate the parent's --hidden option
421 # Propagate the parent's --hidden option
422 if r is r.unfiltered():
422 if r is r.unfiltered():
423 self._repo = self._repo.unfiltered()
423 self._repo = self._repo.unfiltered()
424
424
425 self.ui = self._repo.ui
425 self.ui = self._repo.ui
426 for s, k in [('ui', 'commitsubrepos')]:
426 for s, k in [('ui', 'commitsubrepos')]:
427 v = r.ui.config(s, k)
427 v = r.ui.config(s, k)
428 if v:
428 if v:
429 self.ui.setconfig(s, k, v, 'subrepo')
429 self.ui.setconfig(s, k, v, 'subrepo')
430 # internal config: ui._usedassubrepo
430 # internal config: ui._usedassubrepo
431 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
431 self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo')
432 self._initrepo(r, state[0], create)
432 self._initrepo(r, state[0], create)
433
433
434 @annotatesubrepoerror
434 @annotatesubrepoerror
435 def addwebdirpath(self, serverpath, webconf):
435 def addwebdirpath(self, serverpath, webconf):
436 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
436 cmdutil.addwebdirpath(self._repo, subrelpath(self), webconf)
437
437
438 def storeclean(self, path):
438 def storeclean(self, path):
439 with self._repo.lock():
439 with self._repo.lock():
440 return self._storeclean(path)
440 return self._storeclean(path)
441
441
442 def _storeclean(self, path):
442 def _storeclean(self, path):
443 clean = True
443 clean = True
444 itercache = self._calcstorehash(path)
444 itercache = self._calcstorehash(path)
445 for filehash in self._readstorehashcache(path):
445 for filehash in self._readstorehashcache(path):
446 if filehash != next(itercache, None):
446 if filehash != next(itercache, None):
447 clean = False
447 clean = False
448 break
448 break
449 if clean:
449 if clean:
450 # if not empty:
450 # if not empty:
451 # the cached and current pull states have a different size
451 # the cached and current pull states have a different size
452 clean = next(itercache, None) is None
452 clean = next(itercache, None) is None
453 return clean
453 return clean
454
454
455 def _calcstorehash(self, remotepath):
455 def _calcstorehash(self, remotepath):
456 '''calculate a unique "store hash"
456 '''calculate a unique "store hash"
457
457
458 This method is used to to detect when there are changes that may
458 This method is used to to detect when there are changes that may
459 require a push to a given remote path.'''
459 require a push to a given remote path.'''
460 # sort the files that will be hashed in increasing (likely) file size
460 # sort the files that will be hashed in increasing (likely) file size
461 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
461 filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i')
462 yield '# %s\n' % _expandedabspath(remotepath)
462 yield '# %s\n' % _expandedabspath(remotepath)
463 vfs = self._repo.vfs
463 vfs = self._repo.vfs
464 for relname in filelist:
464 for relname in filelist:
465 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
465 filehash = node.hex(hashlib.sha1(vfs.tryread(relname)).digest())
466 yield '%s = %s\n' % (relname, filehash)
466 yield '%s = %s\n' % (relname, filehash)
467
467
468 @propertycache
468 @propertycache
469 def _cachestorehashvfs(self):
469 def _cachestorehashvfs(self):
470 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
470 return vfsmod.vfs(self._repo.vfs.join('cache/storehash'))
471
471
472 def _readstorehashcache(self, remotepath):
472 def _readstorehashcache(self, remotepath):
473 '''read the store hash cache for a given remote repository'''
473 '''read the store hash cache for a given remote repository'''
474 cachefile = _getstorehashcachename(remotepath)
474 cachefile = _getstorehashcachename(remotepath)
475 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
475 return self._cachestorehashvfs.tryreadlines(cachefile, 'r')
476
476
477 def _cachestorehash(self, remotepath):
477 def _cachestorehash(self, remotepath):
478 '''cache the current store hash
478 '''cache the current store hash
479
479
480 Each remote repo requires its own store hash cache, because a subrepo
480 Each remote repo requires its own store hash cache, because a subrepo
481 store may be "clean" versus a given remote repo, but not versus another
481 store may be "clean" versus a given remote repo, but not versus another
482 '''
482 '''
483 cachefile = _getstorehashcachename(remotepath)
483 cachefile = _getstorehashcachename(remotepath)
484 with self._repo.lock():
484 with self._repo.lock():
485 storehash = list(self._calcstorehash(remotepath))
485 storehash = list(self._calcstorehash(remotepath))
486 vfs = self._cachestorehashvfs
486 vfs = self._cachestorehashvfs
487 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
487 vfs.writelines(cachefile, storehash, mode='wb', notindexed=True)
488
488
489 def _getctx(self):
489 def _getctx(self):
490 '''fetch the context for this subrepo revision, possibly a workingctx
490 '''fetch the context for this subrepo revision, possibly a workingctx
491 '''
491 '''
492 if self._ctx.rev() is None:
492 if self._ctx.rev() is None:
493 return self._repo[None] # workingctx if parent is workingctx
493 return self._repo[None] # workingctx if parent is workingctx
494 else:
494 else:
495 rev = self._state[1]
495 rev = self._state[1]
496 return self._repo[rev]
496 return self._repo[rev]
497
497
498 @annotatesubrepoerror
498 @annotatesubrepoerror
499 def _initrepo(self, parentrepo, source, create):
499 def _initrepo(self, parentrepo, source, create):
500 self._repo._subparent = parentrepo
500 self._repo._subparent = parentrepo
501 self._repo._subsource = source
501 self._repo._subsource = source
502
502
503 if create:
503 if create:
504 lines = ['[paths]\n']
504 lines = ['[paths]\n']
505
505
506 def addpathconfig(key, value):
506 def addpathconfig(key, value):
507 if value:
507 if value:
508 lines.append('%s = %s\n' % (key, value))
508 lines.append('%s = %s\n' % (key, value))
509 self.ui.setconfig('paths', key, value, 'subrepo')
509 self.ui.setconfig('paths', key, value, 'subrepo')
510
510
511 defpath = _abssource(self._repo, abort=False)
511 defpath = _abssource(self._repo, abort=False)
512 defpushpath = _abssource(self._repo, True, abort=False)
512 defpushpath = _abssource(self._repo, True, abort=False)
513 addpathconfig('default', defpath)
513 addpathconfig('default', defpath)
514 if defpath != defpushpath:
514 if defpath != defpushpath:
515 addpathconfig('default-push', defpushpath)
515 addpathconfig('default-push', defpushpath)
516
516
517 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
517 self._repo.vfs.write('hgrc', util.tonativeeol(''.join(lines)))
518
518
519 @annotatesubrepoerror
519 @annotatesubrepoerror
520 def add(self, ui, match, prefix, explicitonly, **opts):
520 def add(self, ui, match, prefix, explicitonly, **opts):
521 return cmdutil.add(ui, self._repo, match, prefix, explicitonly, **opts)
521 return cmdutil.add(ui, self._repo, match, prefix, explicitonly, **opts)
522
522
523 @annotatesubrepoerror
523 @annotatesubrepoerror
524 def addremove(self, m, prefix, opts):
524 def addremove(self, m, prefix, opts):
525 # In the same way as sub directories are processed, once in a subrepo,
525 # In the same way as sub directories are processed, once in a subrepo,
526 # always entry any of its subrepos. Don't corrupt the options that will
526 # always entry any of its subrepos. Don't corrupt the options that will
527 # be used to process sibling subrepos however.
527 # be used to process sibling subrepos however.
528 opts = copy.copy(opts)
528 opts = copy.copy(opts)
529 opts['subrepos'] = True
529 opts['subrepos'] = True
530 return scmutil.addremove(self._repo, m,
530 return scmutil.addremove(self._repo, m, prefix, opts)
531 self.wvfs.reljoin(prefix, self._path), opts)
532
531
533 @annotatesubrepoerror
532 @annotatesubrepoerror
534 def cat(self, match, fm, fntemplate, prefix, **opts):
533 def cat(self, match, fm, fntemplate, prefix, **opts):
535 rev = self._state[1]
534 rev = self._state[1]
536 ctx = self._repo[rev]
535 ctx = self._repo[rev]
537 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
536 return cmdutil.cat(self.ui, self._repo, ctx, match, fm, fntemplate,
538 prefix, **opts)
537 prefix, **opts)
539
538
540 @annotatesubrepoerror
539 @annotatesubrepoerror
541 def status(self, rev2, **opts):
540 def status(self, rev2, **opts):
542 try:
541 try:
543 rev1 = self._state[1]
542 rev1 = self._state[1]
544 ctx1 = self._repo[rev1]
543 ctx1 = self._repo[rev1]
545 ctx2 = self._repo[rev2]
544 ctx2 = self._repo[rev2]
546 return self._repo.status(ctx1, ctx2, **opts)
545 return self._repo.status(ctx1, ctx2, **opts)
547 except error.RepoLookupError as inst:
546 except error.RepoLookupError as inst:
548 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
547 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
549 % (inst, subrelpath(self)))
548 % (inst, subrelpath(self)))
550 return scmutil.status([], [], [], [], [], [], [])
549 return scmutil.status([], [], [], [], [], [], [])
551
550
552 @annotatesubrepoerror
551 @annotatesubrepoerror
553 def diff(self, ui, diffopts, node2, match, prefix, **opts):
552 def diff(self, ui, diffopts, node2, match, prefix, **opts):
554 try:
553 try:
555 node1 = node.bin(self._state[1])
554 node1 = node.bin(self._state[1])
556 # We currently expect node2 to come from substate and be
555 # We currently expect node2 to come from substate and be
557 # in hex format
556 # in hex format
558 if node2 is not None:
557 if node2 is not None:
559 node2 = node.bin(node2)
558 node2 = node.bin(node2)
560 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
559 logcmdutil.diffordiffstat(ui, self._repo, diffopts,
561 node1, node2, match,
560 node1, node2, match,
562 prefix=posixpath.join(prefix, self._path),
561 prefix=posixpath.join(prefix, self._path),
563 listsubrepos=True, **opts)
562 listsubrepos=True, **opts)
564 except error.RepoLookupError as inst:
563 except error.RepoLookupError as inst:
565 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
564 self.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
566 % (inst, subrelpath(self)))
565 % (inst, subrelpath(self)))
567
566
568 @annotatesubrepoerror
567 @annotatesubrepoerror
569 def archive(self, archiver, prefix, match=None, decode=True):
568 def archive(self, archiver, prefix, match=None, decode=True):
570 self._get(self._state + ('hg',))
569 self._get(self._state + ('hg',))
571 files = self.files()
570 files = self.files()
572 if match:
571 if match:
573 files = [f for f in files if match(f)]
572 files = [f for f in files if match(f)]
574 rev = self._state[1]
573 rev = self._state[1]
575 ctx = self._repo[rev]
574 ctx = self._repo[rev]
576 scmutil.prefetchfiles(self._repo, [ctx.rev()],
575 scmutil.prefetchfiles(self._repo, [ctx.rev()],
577 scmutil.matchfiles(self._repo, files))
576 scmutil.matchfiles(self._repo, files))
578 total = abstractsubrepo.archive(self, archiver, prefix, match)
577 total = abstractsubrepo.archive(self, archiver, prefix, match)
579 for subpath in ctx.substate:
578 for subpath in ctx.substate:
580 s = subrepo(ctx, subpath, True)
579 s = subrepo(ctx, subpath, True)
581 submatch = matchmod.subdirmatcher(subpath, match)
580 submatch = matchmod.subdirmatcher(subpath, match)
582 total += s.archive(archiver, prefix + self._path + '/', submatch,
581 total += s.archive(archiver, prefix + self._path + '/', submatch,
583 decode)
582 decode)
584 return total
583 return total
585
584
586 @annotatesubrepoerror
585 @annotatesubrepoerror
587 def dirty(self, ignoreupdate=False, missing=False):
586 def dirty(self, ignoreupdate=False, missing=False):
588 r = self._state[1]
587 r = self._state[1]
589 if r == '' and not ignoreupdate: # no state recorded
588 if r == '' and not ignoreupdate: # no state recorded
590 return True
589 return True
591 w = self._repo[None]
590 w = self._repo[None]
592 if r != w.p1().hex() and not ignoreupdate:
591 if r != w.p1().hex() and not ignoreupdate:
593 # different version checked out
592 # different version checked out
594 return True
593 return True
595 return w.dirty(missing=missing) # working directory changed
594 return w.dirty(missing=missing) # working directory changed
596
595
597 def basestate(self):
596 def basestate(self):
598 return self._repo['.'].hex()
597 return self._repo['.'].hex()
599
598
600 def checknested(self, path):
599 def checknested(self, path):
601 return self._repo._checknested(self._repo.wjoin(path))
600 return self._repo._checknested(self._repo.wjoin(path))
602
601
603 @annotatesubrepoerror
602 @annotatesubrepoerror
604 def commit(self, text, user, date):
603 def commit(self, text, user, date):
605 # don't bother committing in the subrepo if it's only been
604 # don't bother committing in the subrepo if it's only been
606 # updated
605 # updated
607 if not self.dirty(True):
606 if not self.dirty(True):
608 return self._repo['.'].hex()
607 return self._repo['.'].hex()
609 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
608 self.ui.debug("committing subrepo %s\n" % subrelpath(self))
610 n = self._repo.commit(text, user, date)
609 n = self._repo.commit(text, user, date)
611 if not n:
610 if not n:
612 return self._repo['.'].hex() # different version checked out
611 return self._repo['.'].hex() # different version checked out
613 return node.hex(n)
612 return node.hex(n)
614
613
615 @annotatesubrepoerror
614 @annotatesubrepoerror
616 def phase(self, state):
615 def phase(self, state):
617 return self._repo[state or '.'].phase()
616 return self._repo[state or '.'].phase()
618
617
619 @annotatesubrepoerror
618 @annotatesubrepoerror
620 def remove(self):
619 def remove(self):
621 # we can't fully delete the repository as it may contain
620 # we can't fully delete the repository as it may contain
622 # local-only history
621 # local-only history
623 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
622 self.ui.note(_('removing subrepo %s\n') % subrelpath(self))
624 hg.clean(self._repo, node.nullid, False)
623 hg.clean(self._repo, node.nullid, False)
625
624
626 def _get(self, state):
625 def _get(self, state):
627 source, revision, kind = state
626 source, revision, kind = state
628 parentrepo = self._repo._subparent
627 parentrepo = self._repo._subparent
629
628
630 if revision in self._repo.unfiltered():
629 if revision in self._repo.unfiltered():
631 # Allow shared subrepos tracked at null to setup the sharedpath
630 # Allow shared subrepos tracked at null to setup the sharedpath
632 if len(self._repo) != 0 or not parentrepo.shared():
631 if len(self._repo) != 0 or not parentrepo.shared():
633 return True
632 return True
634 self._repo._subsource = source
633 self._repo._subsource = source
635 srcurl = _abssource(self._repo)
634 srcurl = _abssource(self._repo)
636
635
637 # Defer creating the peer until after the status message is logged, in
636 # Defer creating the peer until after the status message is logged, in
638 # case there are network problems.
637 # case there are network problems.
639 getpeer = lambda: hg.peer(self._repo, {}, srcurl)
638 getpeer = lambda: hg.peer(self._repo, {}, srcurl)
640
639
641 if len(self._repo) == 0:
640 if len(self._repo) == 0:
642 # use self._repo.vfs instead of self.wvfs to remove .hg only
641 # use self._repo.vfs instead of self.wvfs to remove .hg only
643 self._repo.vfs.rmtree()
642 self._repo.vfs.rmtree()
644
643
645 # A remote subrepo could be shared if there is a local copy
644 # A remote subrepo could be shared if there is a local copy
646 # relative to the parent's share source. But clone pooling doesn't
645 # relative to the parent's share source. But clone pooling doesn't
647 # assemble the repos in a tree, so that can't be consistently done.
646 # assemble the repos in a tree, so that can't be consistently done.
648 # A simpler option is for the user to configure clone pooling, and
647 # A simpler option is for the user to configure clone pooling, and
649 # work with that.
648 # work with that.
650 if parentrepo.shared() and hg.islocal(srcurl):
649 if parentrepo.shared() and hg.islocal(srcurl):
651 self.ui.status(_('sharing subrepo %s from %s\n')
650 self.ui.status(_('sharing subrepo %s from %s\n')
652 % (subrelpath(self), srcurl))
651 % (subrelpath(self), srcurl))
653 shared = hg.share(self._repo._subparent.baseui,
652 shared = hg.share(self._repo._subparent.baseui,
654 getpeer(), self._repo.root,
653 getpeer(), self._repo.root,
655 update=False, bookmarks=False)
654 update=False, bookmarks=False)
656 self._repo = shared.local()
655 self._repo = shared.local()
657 else:
656 else:
658 # TODO: find a common place for this and this code in the
657 # TODO: find a common place for this and this code in the
659 # share.py wrap of the clone command.
658 # share.py wrap of the clone command.
660 if parentrepo.shared():
659 if parentrepo.shared():
661 pool = self.ui.config('share', 'pool')
660 pool = self.ui.config('share', 'pool')
662 if pool:
661 if pool:
663 pool = util.expandpath(pool)
662 pool = util.expandpath(pool)
664
663
665 shareopts = {
664 shareopts = {
666 'pool': pool,
665 'pool': pool,
667 'mode': self.ui.config('share', 'poolnaming'),
666 'mode': self.ui.config('share', 'poolnaming'),
668 }
667 }
669 else:
668 else:
670 shareopts = {}
669 shareopts = {}
671
670
672 self.ui.status(_('cloning subrepo %s from %s\n')
671 self.ui.status(_('cloning subrepo %s from %s\n')
673 % (subrelpath(self), util.hidepassword(srcurl)))
672 % (subrelpath(self), util.hidepassword(srcurl)))
674 other, cloned = hg.clone(self._repo._subparent.baseui, {},
673 other, cloned = hg.clone(self._repo._subparent.baseui, {},
675 getpeer(), self._repo.root,
674 getpeer(), self._repo.root,
676 update=False, shareopts=shareopts)
675 update=False, shareopts=shareopts)
677 self._repo = cloned.local()
676 self._repo = cloned.local()
678 self._initrepo(parentrepo, source, create=True)
677 self._initrepo(parentrepo, source, create=True)
679 self._cachestorehash(srcurl)
678 self._cachestorehash(srcurl)
680 else:
679 else:
681 self.ui.status(_('pulling subrepo %s from %s\n')
680 self.ui.status(_('pulling subrepo %s from %s\n')
682 % (subrelpath(self), util.hidepassword(srcurl)))
681 % (subrelpath(self), util.hidepassword(srcurl)))
683 cleansub = self.storeclean(srcurl)
682 cleansub = self.storeclean(srcurl)
684 exchange.pull(self._repo, getpeer())
683 exchange.pull(self._repo, getpeer())
685 if cleansub:
684 if cleansub:
686 # keep the repo clean after pull
685 # keep the repo clean after pull
687 self._cachestorehash(srcurl)
686 self._cachestorehash(srcurl)
688 return False
687 return False
689
688
690 @annotatesubrepoerror
689 @annotatesubrepoerror
691 def get(self, state, overwrite=False):
690 def get(self, state, overwrite=False):
692 inrepo = self._get(state)
691 inrepo = self._get(state)
693 source, revision, kind = state
692 source, revision, kind = state
694 repo = self._repo
693 repo = self._repo
695 repo.ui.debug("getting subrepo %s\n" % self._path)
694 repo.ui.debug("getting subrepo %s\n" % self._path)
696 if inrepo:
695 if inrepo:
697 urepo = repo.unfiltered()
696 urepo = repo.unfiltered()
698 ctx = urepo[revision]
697 ctx = urepo[revision]
699 if ctx.hidden():
698 if ctx.hidden():
700 urepo.ui.warn(
699 urepo.ui.warn(
701 _('revision %s in subrepository "%s" is hidden\n') \
700 _('revision %s in subrepository "%s" is hidden\n') \
702 % (revision[0:12], self._path))
701 % (revision[0:12], self._path))
703 repo = urepo
702 repo = urepo
704 hg.updaterepo(repo, revision, overwrite)
703 hg.updaterepo(repo, revision, overwrite)
705
704
706 @annotatesubrepoerror
705 @annotatesubrepoerror
707 def merge(self, state):
706 def merge(self, state):
708 self._get(state)
707 self._get(state)
709 cur = self._repo['.']
708 cur = self._repo['.']
710 dst = self._repo[state[1]]
709 dst = self._repo[state[1]]
711 anc = dst.ancestor(cur)
710 anc = dst.ancestor(cur)
712
711
713 def mergefunc():
712 def mergefunc():
714 if anc == cur and dst.branch() == cur.branch():
713 if anc == cur and dst.branch() == cur.branch():
715 self.ui.debug('updating subrepository "%s"\n'
714 self.ui.debug('updating subrepository "%s"\n'
716 % subrelpath(self))
715 % subrelpath(self))
717 hg.update(self._repo, state[1])
716 hg.update(self._repo, state[1])
718 elif anc == dst:
717 elif anc == dst:
719 self.ui.debug('skipping subrepository "%s"\n'
718 self.ui.debug('skipping subrepository "%s"\n'
720 % subrelpath(self))
719 % subrelpath(self))
721 else:
720 else:
722 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
721 self.ui.debug('merging subrepository "%s"\n' % subrelpath(self))
723 hg.merge(self._repo, state[1], remind=False)
722 hg.merge(self._repo, state[1], remind=False)
724
723
725 wctx = self._repo[None]
724 wctx = self._repo[None]
726 if self.dirty():
725 if self.dirty():
727 if anc != dst:
726 if anc != dst:
728 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
727 if _updateprompt(self.ui, self, wctx.dirty(), cur, dst):
729 mergefunc()
728 mergefunc()
730 else:
729 else:
731 mergefunc()
730 mergefunc()
732 else:
731 else:
733 mergefunc()
732 mergefunc()
734
733
735 @annotatesubrepoerror
734 @annotatesubrepoerror
736 def push(self, opts):
735 def push(self, opts):
737 force = opts.get('force')
736 force = opts.get('force')
738 newbranch = opts.get('new_branch')
737 newbranch = opts.get('new_branch')
739 ssh = opts.get('ssh')
738 ssh = opts.get('ssh')
740
739
741 # push subrepos depth-first for coherent ordering
740 # push subrepos depth-first for coherent ordering
742 c = self._repo['.']
741 c = self._repo['.']
743 subs = c.substate # only repos that are committed
742 subs = c.substate # only repos that are committed
744 for s in sorted(subs):
743 for s in sorted(subs):
745 if c.sub(s).push(opts) == 0:
744 if c.sub(s).push(opts) == 0:
746 return False
745 return False
747
746
748 dsturl = _abssource(self._repo, True)
747 dsturl = _abssource(self._repo, True)
749 if not force:
748 if not force:
750 if self.storeclean(dsturl):
749 if self.storeclean(dsturl):
751 self.ui.status(
750 self.ui.status(
752 _('no changes made to subrepo %s since last push to %s\n')
751 _('no changes made to subrepo %s since last push to %s\n')
753 % (subrelpath(self), util.hidepassword(dsturl)))
752 % (subrelpath(self), util.hidepassword(dsturl)))
754 return None
753 return None
755 self.ui.status(_('pushing subrepo %s to %s\n') %
754 self.ui.status(_('pushing subrepo %s to %s\n') %
756 (subrelpath(self), util.hidepassword(dsturl)))
755 (subrelpath(self), util.hidepassword(dsturl)))
757 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
756 other = hg.peer(self._repo, {'ssh': ssh}, dsturl)
758 res = exchange.push(self._repo, other, force, newbranch=newbranch)
757 res = exchange.push(self._repo, other, force, newbranch=newbranch)
759
758
760 # the repo is now clean
759 # the repo is now clean
761 self._cachestorehash(dsturl)
760 self._cachestorehash(dsturl)
762 return res.cgresult
761 return res.cgresult
763
762
764 @annotatesubrepoerror
763 @annotatesubrepoerror
765 def outgoing(self, ui, dest, opts):
764 def outgoing(self, ui, dest, opts):
766 if 'rev' in opts or 'branch' in opts:
765 if 'rev' in opts or 'branch' in opts:
767 opts = copy.copy(opts)
766 opts = copy.copy(opts)
768 opts.pop('rev', None)
767 opts.pop('rev', None)
769 opts.pop('branch', None)
768 opts.pop('branch', None)
770 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
769 return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
771
770
772 @annotatesubrepoerror
771 @annotatesubrepoerror
773 def incoming(self, ui, source, opts):
772 def incoming(self, ui, source, opts):
774 if 'rev' in opts or 'branch' in opts:
773 if 'rev' in opts or 'branch' in opts:
775 opts = copy.copy(opts)
774 opts = copy.copy(opts)
776 opts.pop('rev', None)
775 opts.pop('rev', None)
777 opts.pop('branch', None)
776 opts.pop('branch', None)
778 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
777 return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts)
779
778
780 @annotatesubrepoerror
779 @annotatesubrepoerror
781 def files(self):
780 def files(self):
782 rev = self._state[1]
781 rev = self._state[1]
783 ctx = self._repo[rev]
782 ctx = self._repo[rev]
784 return ctx.manifest().keys()
783 return ctx.manifest().keys()
785
784
786 def filedata(self, name, decode):
785 def filedata(self, name, decode):
787 rev = self._state[1]
786 rev = self._state[1]
788 data = self._repo[rev][name].data()
787 data = self._repo[rev][name].data()
789 if decode:
788 if decode:
790 data = self._repo.wwritedata(name, data)
789 data = self._repo.wwritedata(name, data)
791 return data
790 return data
792
791
793 def fileflags(self, name):
792 def fileflags(self, name):
794 rev = self._state[1]
793 rev = self._state[1]
795 ctx = self._repo[rev]
794 ctx = self._repo[rev]
796 return ctx.flags(name)
795 return ctx.flags(name)
797
796
798 @annotatesubrepoerror
797 @annotatesubrepoerror
799 def printfiles(self, ui, m, fm, fmt, subrepos):
798 def printfiles(self, ui, m, fm, fmt, subrepos):
800 # If the parent context is a workingctx, use the workingctx here for
799 # If the parent context is a workingctx, use the workingctx here for
801 # consistency.
800 # consistency.
802 if self._ctx.rev() is None:
801 if self._ctx.rev() is None:
803 ctx = self._repo[None]
802 ctx = self._repo[None]
804 else:
803 else:
805 rev = self._state[1]
804 rev = self._state[1]
806 ctx = self._repo[rev]
805 ctx = self._repo[rev]
807 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
806 return cmdutil.files(ui, ctx, m, fm, fmt, subrepos)
808
807
809 @annotatesubrepoerror
808 @annotatesubrepoerror
810 def matchfileset(self, expr, badfn=None):
809 def matchfileset(self, expr, badfn=None):
811 repo = self._repo
810 repo = self._repo
812 if self._ctx.rev() is None:
811 if self._ctx.rev() is None:
813 ctx = repo[None]
812 ctx = repo[None]
814 else:
813 else:
815 rev = self._state[1]
814 rev = self._state[1]
816 ctx = repo[rev]
815 ctx = repo[rev]
817
816
818 matchers = [ctx.matchfileset(expr, badfn=badfn)]
817 matchers = [ctx.matchfileset(expr, badfn=badfn)]
819
818
820 for subpath in ctx.substate:
819 for subpath in ctx.substate:
821 sub = ctx.sub(subpath)
820 sub = ctx.sub(subpath)
822
821
823 try:
822 try:
824 sm = sub.matchfileset(expr, badfn=badfn)
823 sm = sub.matchfileset(expr, badfn=badfn)
825 pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(),
824 pm = matchmod.prefixdirmatcher(repo.root, repo.getcwd(),
826 subpath, sm, badfn=badfn)
825 subpath, sm, badfn=badfn)
827 matchers.append(pm)
826 matchers.append(pm)
828 except error.LookupError:
827 except error.LookupError:
829 self.ui.status(_("skipping missing subrepository: %s\n")
828 self.ui.status(_("skipping missing subrepository: %s\n")
830 % self.wvfs.reljoin(reporelpath(self), subpath))
829 % self.wvfs.reljoin(reporelpath(self), subpath))
831 if len(matchers) == 1:
830 if len(matchers) == 1:
832 return matchers[0]
831 return matchers[0]
833 return matchmod.unionmatcher(matchers)
832 return matchmod.unionmatcher(matchers)
834
833
835 def walk(self, match):
834 def walk(self, match):
836 ctx = self._repo[None]
835 ctx = self._repo[None]
837 return ctx.walk(match)
836 return ctx.walk(match)
838
837
839 @annotatesubrepoerror
838 @annotatesubrepoerror
840 def forget(self, match, prefix, dryrun, interactive):
839 def forget(self, match, prefix, dryrun, interactive):
841 return cmdutil.forget(self.ui, self._repo, match, prefix,
840 return cmdutil.forget(self.ui, self._repo, match, prefix,
842 True, dryrun=dryrun, interactive=interactive)
841 True, dryrun=dryrun, interactive=interactive)
843
842
844 @annotatesubrepoerror
843 @annotatesubrepoerror
845 def removefiles(self, matcher, prefix, after, force, subrepos,
844 def removefiles(self, matcher, prefix, after, force, subrepos,
846 dryrun, warnings):
845 dryrun, warnings):
847 return cmdutil.remove(self.ui, self._repo, matcher, prefix,
846 return cmdutil.remove(self.ui, self._repo, matcher, prefix,
848 after, force, subrepos, dryrun)
847 after, force, subrepos, dryrun)
849
848
850 @annotatesubrepoerror
849 @annotatesubrepoerror
851 def revert(self, substate, *pats, **opts):
850 def revert(self, substate, *pats, **opts):
852 # reverting a subrepo is a 2 step process:
851 # reverting a subrepo is a 2 step process:
853 # 1. if the no_backup is not set, revert all modified
852 # 1. if the no_backup is not set, revert all modified
854 # files inside the subrepo
853 # files inside the subrepo
855 # 2. update the subrepo to the revision specified in
854 # 2. update the subrepo to the revision specified in
856 # the corresponding substate dictionary
855 # the corresponding substate dictionary
857 self.ui.status(_('reverting subrepo %s\n') % substate[0])
856 self.ui.status(_('reverting subrepo %s\n') % substate[0])
858 if not opts.get(r'no_backup'):
857 if not opts.get(r'no_backup'):
859 # Revert all files on the subrepo, creating backups
858 # Revert all files on the subrepo, creating backups
860 # Note that this will not recursively revert subrepos
859 # Note that this will not recursively revert subrepos
861 # We could do it if there was a set:subrepos() predicate
860 # We could do it if there was a set:subrepos() predicate
862 opts = opts.copy()
861 opts = opts.copy()
863 opts[r'date'] = None
862 opts[r'date'] = None
864 opts[r'rev'] = substate[1]
863 opts[r'rev'] = substate[1]
865
864
866 self.filerevert(*pats, **opts)
865 self.filerevert(*pats, **opts)
867
866
868 # Update the repo to the revision specified in the given substate
867 # Update the repo to the revision specified in the given substate
869 if not opts.get(r'dry_run'):
868 if not opts.get(r'dry_run'):
870 self.get(substate, overwrite=True)
869 self.get(substate, overwrite=True)
871
870
872 def filerevert(self, *pats, **opts):
871 def filerevert(self, *pats, **opts):
873 ctx = self._repo[opts[r'rev']]
872 ctx = self._repo[opts[r'rev']]
874 parents = self._repo.dirstate.parents()
873 parents = self._repo.dirstate.parents()
875 if opts.get(r'all'):
874 if opts.get(r'all'):
876 pats = ['set:modified()']
875 pats = ['set:modified()']
877 else:
876 else:
878 pats = []
877 pats = []
879 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
878 cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts)
880
879
881 def shortid(self, revid):
880 def shortid(self, revid):
882 return revid[:12]
881 return revid[:12]
883
882
884 @annotatesubrepoerror
883 @annotatesubrepoerror
885 def unshare(self):
884 def unshare(self):
886 # subrepo inherently violates our import layering rules
885 # subrepo inherently violates our import layering rules
887 # because it wants to make repo objects from deep inside the stack
886 # because it wants to make repo objects from deep inside the stack
888 # so we manually delay the circular imports to not break
887 # so we manually delay the circular imports to not break
889 # scripts that don't use our demand-loading
888 # scripts that don't use our demand-loading
890 global hg
889 global hg
891 from . import hg as h
890 from . import hg as h
892 hg = h
891 hg = h
893
892
894 # Nothing prevents a user from sharing in a repo, and then making that a
893 # Nothing prevents a user from sharing in a repo, and then making that a
895 # subrepo. Alternately, the previous unshare attempt may have failed
894 # subrepo. Alternately, the previous unshare attempt may have failed
896 # part way through. So recurse whether or not this layer is shared.
895 # part way through. So recurse whether or not this layer is shared.
897 if self._repo.shared():
896 if self._repo.shared():
898 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
897 self.ui.status(_("unsharing subrepo '%s'\n") % self._relpath)
899
898
900 hg.unshare(self.ui, self._repo)
899 hg.unshare(self.ui, self._repo)
901
900
902 def verify(self):
901 def verify(self):
903 try:
902 try:
904 rev = self._state[1]
903 rev = self._state[1]
905 ctx = self._repo.unfiltered()[rev]
904 ctx = self._repo.unfiltered()[rev]
906 if ctx.hidden():
905 if ctx.hidden():
907 # Since hidden revisions aren't pushed/pulled, it seems worth an
906 # Since hidden revisions aren't pushed/pulled, it seems worth an
908 # explicit warning.
907 # explicit warning.
909 ui = self._repo.ui
908 ui = self._repo.ui
910 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
909 ui.warn(_("subrepo '%s' is hidden in revision %s\n") %
911 (self._relpath, node.short(self._ctx.node())))
910 (self._relpath, node.short(self._ctx.node())))
912 return 0
911 return 0
913 except error.RepoLookupError:
912 except error.RepoLookupError:
914 # A missing subrepo revision may be a case of needing to pull it, so
913 # A missing subrepo revision may be a case of needing to pull it, so
915 # don't treat this as an error.
914 # don't treat this as an error.
916 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
915 self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") %
917 (self._relpath, node.short(self._ctx.node())))
916 (self._relpath, node.short(self._ctx.node())))
918 return 0
917 return 0
919
918
920 @propertycache
919 @propertycache
921 def wvfs(self):
920 def wvfs(self):
922 """return own wvfs for efficiency and consistency
921 """return own wvfs for efficiency and consistency
923 """
922 """
924 return self._repo.wvfs
923 return self._repo.wvfs
925
924
926 @propertycache
925 @propertycache
927 def _relpath(self):
926 def _relpath(self):
928 """return path to this subrepository as seen from outermost repository
927 """return path to this subrepository as seen from outermost repository
929 """
928 """
930 # Keep consistent dir separators by avoiding vfs.join(self._path)
929 # Keep consistent dir separators by avoiding vfs.join(self._path)
931 return reporelpath(self._repo)
930 return reporelpath(self._repo)
932
931
933 class svnsubrepo(abstractsubrepo):
932 class svnsubrepo(abstractsubrepo):
934 def __init__(self, ctx, path, state, allowcreate):
933 def __init__(self, ctx, path, state, allowcreate):
935 super(svnsubrepo, self).__init__(ctx, path)
934 super(svnsubrepo, self).__init__(ctx, path)
936 self._state = state
935 self._state = state
937 self._exe = procutil.findexe('svn')
936 self._exe = procutil.findexe('svn')
938 if not self._exe:
937 if not self._exe:
939 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
938 raise error.Abort(_("'svn' executable not found for subrepo '%s'")
940 % self._path)
939 % self._path)
941
940
942 def _svncommand(self, commands, filename='', failok=False):
941 def _svncommand(self, commands, filename='', failok=False):
943 cmd = [self._exe]
942 cmd = [self._exe]
944 extrakw = {}
943 extrakw = {}
945 if not self.ui.interactive():
944 if not self.ui.interactive():
946 # Making stdin be a pipe should prevent svn from behaving
945 # Making stdin be a pipe should prevent svn from behaving
947 # interactively even if we can't pass --non-interactive.
946 # interactively even if we can't pass --non-interactive.
948 extrakw[r'stdin'] = subprocess.PIPE
947 extrakw[r'stdin'] = subprocess.PIPE
949 # Starting in svn 1.5 --non-interactive is a global flag
948 # Starting in svn 1.5 --non-interactive is a global flag
950 # instead of being per-command, but we need to support 1.4 so
949 # instead of being per-command, but we need to support 1.4 so
951 # we have to be intelligent about what commands take
950 # we have to be intelligent about what commands take
952 # --non-interactive.
951 # --non-interactive.
953 if commands[0] in ('update', 'checkout', 'commit'):
952 if commands[0] in ('update', 'checkout', 'commit'):
954 cmd.append('--non-interactive')
953 cmd.append('--non-interactive')
955 cmd.extend(commands)
954 cmd.extend(commands)
956 if filename is not None:
955 if filename is not None:
957 path = self.wvfs.reljoin(self._ctx.repo().origroot,
956 path = self.wvfs.reljoin(self._ctx.repo().origroot,
958 self._path, filename)
957 self._path, filename)
959 cmd.append(path)
958 cmd.append(path)
960 env = dict(encoding.environ)
959 env = dict(encoding.environ)
961 # Avoid localized output, preserve current locale for everything else.
960 # Avoid localized output, preserve current locale for everything else.
962 lc_all = env.get('LC_ALL')
961 lc_all = env.get('LC_ALL')
963 if lc_all:
962 if lc_all:
964 env['LANG'] = lc_all
963 env['LANG'] = lc_all
965 del env['LC_ALL']
964 del env['LC_ALL']
966 env['LC_MESSAGES'] = 'C'
965 env['LC_MESSAGES'] = 'C'
967 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
966 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr, cmd),
968 bufsize=-1, close_fds=procutil.closefds,
967 bufsize=-1, close_fds=procutil.closefds,
969 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
968 stdout=subprocess.PIPE, stderr=subprocess.PIPE,
970 env=procutil.tonativeenv(env), **extrakw)
969 env=procutil.tonativeenv(env), **extrakw)
971 stdout, stderr = map(util.fromnativeeol, p.communicate())
970 stdout, stderr = map(util.fromnativeeol, p.communicate())
972 stderr = stderr.strip()
971 stderr = stderr.strip()
973 if not failok:
972 if not failok:
974 if p.returncode:
973 if p.returncode:
975 raise error.Abort(stderr or 'exited with code %d'
974 raise error.Abort(stderr or 'exited with code %d'
976 % p.returncode)
975 % p.returncode)
977 if stderr:
976 if stderr:
978 self.ui.warn(stderr + '\n')
977 self.ui.warn(stderr + '\n')
979 return stdout, stderr
978 return stdout, stderr
980
979
981 @propertycache
980 @propertycache
982 def _svnversion(self):
981 def _svnversion(self):
983 output, err = self._svncommand(['--version', '--quiet'], filename=None)
982 output, err = self._svncommand(['--version', '--quiet'], filename=None)
984 m = re.search(br'^(\d+)\.(\d+)', output)
983 m = re.search(br'^(\d+)\.(\d+)', output)
985 if not m:
984 if not m:
986 raise error.Abort(_('cannot retrieve svn tool version'))
985 raise error.Abort(_('cannot retrieve svn tool version'))
987 return (int(m.group(1)), int(m.group(2)))
986 return (int(m.group(1)), int(m.group(2)))
988
987
989 def _svnmissing(self):
988 def _svnmissing(self):
990 return not self.wvfs.exists('.svn')
989 return not self.wvfs.exists('.svn')
991
990
992 def _wcrevs(self):
991 def _wcrevs(self):
993 # Get the working directory revision as well as the last
992 # Get the working directory revision as well as the last
994 # commit revision so we can compare the subrepo state with
993 # commit revision so we can compare the subrepo state with
995 # both. We used to store the working directory one.
994 # both. We used to store the working directory one.
996 output, err = self._svncommand(['info', '--xml'])
995 output, err = self._svncommand(['info', '--xml'])
997 doc = xml.dom.minidom.parseString(output)
996 doc = xml.dom.minidom.parseString(output)
998 entries = doc.getElementsByTagName(r'entry')
997 entries = doc.getElementsByTagName(r'entry')
999 lastrev, rev = '0', '0'
998 lastrev, rev = '0', '0'
1000 if entries:
999 if entries:
1001 rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
1000 rev = pycompat.bytestr(entries[0].getAttribute(r'revision')) or '0'
1002 commits = entries[0].getElementsByTagName(r'commit')
1001 commits = entries[0].getElementsByTagName(r'commit')
1003 if commits:
1002 if commits:
1004 lastrev = pycompat.bytestr(
1003 lastrev = pycompat.bytestr(
1005 commits[0].getAttribute(r'revision')) or '0'
1004 commits[0].getAttribute(r'revision')) or '0'
1006 return (lastrev, rev)
1005 return (lastrev, rev)
1007
1006
1008 def _wcrev(self):
1007 def _wcrev(self):
1009 return self._wcrevs()[0]
1008 return self._wcrevs()[0]
1010
1009
1011 def _wcchanged(self):
1010 def _wcchanged(self):
1012 """Return (changes, extchanges, missing) where changes is True
1011 """Return (changes, extchanges, missing) where changes is True
1013 if the working directory was changed, extchanges is
1012 if the working directory was changed, extchanges is
1014 True if any of these changes concern an external entry and missing
1013 True if any of these changes concern an external entry and missing
1015 is True if any change is a missing entry.
1014 is True if any change is a missing entry.
1016 """
1015 """
1017 output, err = self._svncommand(['status', '--xml'])
1016 output, err = self._svncommand(['status', '--xml'])
1018 externals, changes, missing = [], [], []
1017 externals, changes, missing = [], [], []
1019 doc = xml.dom.minidom.parseString(output)
1018 doc = xml.dom.minidom.parseString(output)
1020 for e in doc.getElementsByTagName(r'entry'):
1019 for e in doc.getElementsByTagName(r'entry'):
1021 s = e.getElementsByTagName(r'wc-status')
1020 s = e.getElementsByTagName(r'wc-status')
1022 if not s:
1021 if not s:
1023 continue
1022 continue
1024 item = s[0].getAttribute(r'item')
1023 item = s[0].getAttribute(r'item')
1025 props = s[0].getAttribute(r'props')
1024 props = s[0].getAttribute(r'props')
1026 path = e.getAttribute(r'path').encode('utf8')
1025 path = e.getAttribute(r'path').encode('utf8')
1027 if item == r'external':
1026 if item == r'external':
1028 externals.append(path)
1027 externals.append(path)
1029 elif item == r'missing':
1028 elif item == r'missing':
1030 missing.append(path)
1029 missing.append(path)
1031 if (item not in (r'', r'normal', r'unversioned', r'external')
1030 if (item not in (r'', r'normal', r'unversioned', r'external')
1032 or props not in (r'', r'none', r'normal')):
1031 or props not in (r'', r'none', r'normal')):
1033 changes.append(path)
1032 changes.append(path)
1034 for path in changes:
1033 for path in changes:
1035 for ext in externals:
1034 for ext in externals:
1036 if path == ext or path.startswith(ext + pycompat.ossep):
1035 if path == ext or path.startswith(ext + pycompat.ossep):
1037 return True, True, bool(missing)
1036 return True, True, bool(missing)
1038 return bool(changes), False, bool(missing)
1037 return bool(changes), False, bool(missing)
1039
1038
1040 @annotatesubrepoerror
1039 @annotatesubrepoerror
1041 def dirty(self, ignoreupdate=False, missing=False):
1040 def dirty(self, ignoreupdate=False, missing=False):
1042 if self._svnmissing():
1041 if self._svnmissing():
1043 return self._state[1] != ''
1042 return self._state[1] != ''
1044 wcchanged = self._wcchanged()
1043 wcchanged = self._wcchanged()
1045 changed = wcchanged[0] or (missing and wcchanged[2])
1044 changed = wcchanged[0] or (missing and wcchanged[2])
1046 if not changed:
1045 if not changed:
1047 if self._state[1] in self._wcrevs() or ignoreupdate:
1046 if self._state[1] in self._wcrevs() or ignoreupdate:
1048 return False
1047 return False
1049 return True
1048 return True
1050
1049
1051 def basestate(self):
1050 def basestate(self):
1052 lastrev, rev = self._wcrevs()
1051 lastrev, rev = self._wcrevs()
1053 if lastrev != rev:
1052 if lastrev != rev:
1054 # Last committed rev is not the same than rev. We would
1053 # Last committed rev is not the same than rev. We would
1055 # like to take lastrev but we do not know if the subrepo
1054 # like to take lastrev but we do not know if the subrepo
1056 # URL exists at lastrev. Test it and fallback to rev it
1055 # URL exists at lastrev. Test it and fallback to rev it
1057 # is not there.
1056 # is not there.
1058 try:
1057 try:
1059 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1058 self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
1060 return lastrev
1059 return lastrev
1061 except error.Abort:
1060 except error.Abort:
1062 pass
1061 pass
1063 return rev
1062 return rev
1064
1063
1065 @annotatesubrepoerror
1064 @annotatesubrepoerror
1066 def commit(self, text, user, date):
1065 def commit(self, text, user, date):
1067 # user and date are out of our hands since svn is centralized
1066 # user and date are out of our hands since svn is centralized
1068 changed, extchanged, missing = self._wcchanged()
1067 changed, extchanged, missing = self._wcchanged()
1069 if not changed:
1068 if not changed:
1070 return self.basestate()
1069 return self.basestate()
1071 if extchanged:
1070 if extchanged:
1072 # Do not try to commit externals
1071 # Do not try to commit externals
1073 raise error.Abort(_('cannot commit svn externals'))
1072 raise error.Abort(_('cannot commit svn externals'))
1074 if missing:
1073 if missing:
1075 # svn can commit with missing entries but aborting like hg
1074 # svn can commit with missing entries but aborting like hg
1076 # seems a better approach.
1075 # seems a better approach.
1077 raise error.Abort(_('cannot commit missing svn entries'))
1076 raise error.Abort(_('cannot commit missing svn entries'))
1078 commitinfo, err = self._svncommand(['commit', '-m', text])
1077 commitinfo, err = self._svncommand(['commit', '-m', text])
1079 self.ui.status(commitinfo)
1078 self.ui.status(commitinfo)
1080 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1079 newrev = re.search('Committed revision ([0-9]+).', commitinfo)
1081 if not newrev:
1080 if not newrev:
1082 if not commitinfo.strip():
1081 if not commitinfo.strip():
1083 # Sometimes, our definition of "changed" differs from
1082 # Sometimes, our definition of "changed" differs from
1084 # svn one. For instance, svn ignores missing files
1083 # svn one. For instance, svn ignores missing files
1085 # when committing. If there are only missing files, no
1084 # when committing. If there are only missing files, no
1086 # commit is made, no output and no error code.
1085 # commit is made, no output and no error code.
1087 raise error.Abort(_('failed to commit svn changes'))
1086 raise error.Abort(_('failed to commit svn changes'))
1088 raise error.Abort(commitinfo.splitlines()[-1])
1087 raise error.Abort(commitinfo.splitlines()[-1])
1089 newrev = newrev.groups()[0]
1088 newrev = newrev.groups()[0]
1090 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1089 self.ui.status(self._svncommand(['update', '-r', newrev])[0])
1091 return newrev
1090 return newrev
1092
1091
1093 @annotatesubrepoerror
1092 @annotatesubrepoerror
1094 def remove(self):
1093 def remove(self):
1095 if self.dirty():
1094 if self.dirty():
1096 self.ui.warn(_('not removing repo %s because '
1095 self.ui.warn(_('not removing repo %s because '
1097 'it has changes.\n') % self._path)
1096 'it has changes.\n') % self._path)
1098 return
1097 return
1099 self.ui.note(_('removing subrepo %s\n') % self._path)
1098 self.ui.note(_('removing subrepo %s\n') % self._path)
1100
1099
1101 self.wvfs.rmtree(forcibly=True)
1100 self.wvfs.rmtree(forcibly=True)
1102 try:
1101 try:
1103 pwvfs = self._ctx.repo().wvfs
1102 pwvfs = self._ctx.repo().wvfs
1104 pwvfs.removedirs(pwvfs.dirname(self._path))
1103 pwvfs.removedirs(pwvfs.dirname(self._path))
1105 except OSError:
1104 except OSError:
1106 pass
1105 pass
1107
1106
1108 @annotatesubrepoerror
1107 @annotatesubrepoerror
1109 def get(self, state, overwrite=False):
1108 def get(self, state, overwrite=False):
1110 if overwrite:
1109 if overwrite:
1111 self._svncommand(['revert', '--recursive'])
1110 self._svncommand(['revert', '--recursive'])
1112 args = ['checkout']
1111 args = ['checkout']
1113 if self._svnversion >= (1, 5):
1112 if self._svnversion >= (1, 5):
1114 args.append('--force')
1113 args.append('--force')
1115 # The revision must be specified at the end of the URL to properly
1114 # The revision must be specified at the end of the URL to properly
1116 # update to a directory which has since been deleted and recreated.
1115 # update to a directory which has since been deleted and recreated.
1117 args.append('%s@%s' % (state[0], state[1]))
1116 args.append('%s@%s' % (state[0], state[1]))
1118
1117
1119 # SEC: check that the ssh url is safe
1118 # SEC: check that the ssh url is safe
1120 util.checksafessh(state[0])
1119 util.checksafessh(state[0])
1121
1120
1122 status, err = self._svncommand(args, failok=True)
1121 status, err = self._svncommand(args, failok=True)
1123 _sanitize(self.ui, self.wvfs, '.svn')
1122 _sanitize(self.ui, self.wvfs, '.svn')
1124 if not re.search('Checked out revision [0-9]+.', status):
1123 if not re.search('Checked out revision [0-9]+.', status):
1125 if ('is already a working copy for a different URL' in err
1124 if ('is already a working copy for a different URL' in err
1126 and (self._wcchanged()[:2] == (False, False))):
1125 and (self._wcchanged()[:2] == (False, False))):
1127 # obstructed but clean working copy, so just blow it away.
1126 # obstructed but clean working copy, so just blow it away.
1128 self.remove()
1127 self.remove()
1129 self.get(state, overwrite=False)
1128 self.get(state, overwrite=False)
1130 return
1129 return
1131 raise error.Abort((status or err).splitlines()[-1])
1130 raise error.Abort((status or err).splitlines()[-1])
1132 self.ui.status(status)
1131 self.ui.status(status)
1133
1132
1134 @annotatesubrepoerror
1133 @annotatesubrepoerror
1135 def merge(self, state):
1134 def merge(self, state):
1136 old = self._state[1]
1135 old = self._state[1]
1137 new = state[1]
1136 new = state[1]
1138 wcrev = self._wcrev()
1137 wcrev = self._wcrev()
1139 if new != wcrev:
1138 if new != wcrev:
1140 dirty = old == wcrev or self._wcchanged()[0]
1139 dirty = old == wcrev or self._wcchanged()[0]
1141 if _updateprompt(self.ui, self, dirty, wcrev, new):
1140 if _updateprompt(self.ui, self, dirty, wcrev, new):
1142 self.get(state, False)
1141 self.get(state, False)
1143
1142
1144 def push(self, opts):
1143 def push(self, opts):
1145 # push is a no-op for SVN
1144 # push is a no-op for SVN
1146 return True
1145 return True
1147
1146
1148 @annotatesubrepoerror
1147 @annotatesubrepoerror
1149 def files(self):
1148 def files(self):
1150 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1149 output = self._svncommand(['list', '--recursive', '--xml'])[0]
1151 doc = xml.dom.minidom.parseString(output)
1150 doc = xml.dom.minidom.parseString(output)
1152 paths = []
1151 paths = []
1153 for e in doc.getElementsByTagName(r'entry'):
1152 for e in doc.getElementsByTagName(r'entry'):
1154 kind = pycompat.bytestr(e.getAttribute(r'kind'))
1153 kind = pycompat.bytestr(e.getAttribute(r'kind'))
1155 if kind != 'file':
1154 if kind != 'file':
1156 continue
1155 continue
1157 name = r''.join(c.data for c
1156 name = r''.join(c.data for c
1158 in e.getElementsByTagName(r'name')[0].childNodes
1157 in e.getElementsByTagName(r'name')[0].childNodes
1159 if c.nodeType == c.TEXT_NODE)
1158 if c.nodeType == c.TEXT_NODE)
1160 paths.append(name.encode('utf8'))
1159 paths.append(name.encode('utf8'))
1161 return paths
1160 return paths
1162
1161
1163 def filedata(self, name, decode):
1162 def filedata(self, name, decode):
1164 return self._svncommand(['cat'], name)[0]
1163 return self._svncommand(['cat'], name)[0]
1165
1164
1166
1165
1167 class gitsubrepo(abstractsubrepo):
1166 class gitsubrepo(abstractsubrepo):
1168 def __init__(self, ctx, path, state, allowcreate):
1167 def __init__(self, ctx, path, state, allowcreate):
1169 super(gitsubrepo, self).__init__(ctx, path)
1168 super(gitsubrepo, self).__init__(ctx, path)
1170 self._state = state
1169 self._state = state
1171 self._abspath = ctx.repo().wjoin(path)
1170 self._abspath = ctx.repo().wjoin(path)
1172 self._subparent = ctx.repo()
1171 self._subparent = ctx.repo()
1173 self._ensuregit()
1172 self._ensuregit()
1174
1173
1175 def _ensuregit(self):
1174 def _ensuregit(self):
1176 try:
1175 try:
1177 self._gitexecutable = 'git'
1176 self._gitexecutable = 'git'
1178 out, err = self._gitnodir(['--version'])
1177 out, err = self._gitnodir(['--version'])
1179 except OSError as e:
1178 except OSError as e:
1180 genericerror = _("error executing git for subrepo '%s': %s")
1179 genericerror = _("error executing git for subrepo '%s': %s")
1181 notfoundhint = _("check git is installed and in your PATH")
1180 notfoundhint = _("check git is installed and in your PATH")
1182 if e.errno != errno.ENOENT:
1181 if e.errno != errno.ENOENT:
1183 raise error.Abort(genericerror % (
1182 raise error.Abort(genericerror % (
1184 self._path, encoding.strtolocal(e.strerror)))
1183 self._path, encoding.strtolocal(e.strerror)))
1185 elif pycompat.iswindows:
1184 elif pycompat.iswindows:
1186 try:
1185 try:
1187 self._gitexecutable = 'git.cmd'
1186 self._gitexecutable = 'git.cmd'
1188 out, err = self._gitnodir(['--version'])
1187 out, err = self._gitnodir(['--version'])
1189 except OSError as e2:
1188 except OSError as e2:
1190 if e2.errno == errno.ENOENT:
1189 if e2.errno == errno.ENOENT:
1191 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1190 raise error.Abort(_("couldn't find 'git' or 'git.cmd'"
1192 " for subrepo '%s'") % self._path,
1191 " for subrepo '%s'") % self._path,
1193 hint=notfoundhint)
1192 hint=notfoundhint)
1194 else:
1193 else:
1195 raise error.Abort(genericerror % (self._path,
1194 raise error.Abort(genericerror % (self._path,
1196 encoding.strtolocal(e2.strerror)))
1195 encoding.strtolocal(e2.strerror)))
1197 else:
1196 else:
1198 raise error.Abort(_("couldn't find git for subrepo '%s'")
1197 raise error.Abort(_("couldn't find git for subrepo '%s'")
1199 % self._path, hint=notfoundhint)
1198 % self._path, hint=notfoundhint)
1200 versionstatus = self._checkversion(out)
1199 versionstatus = self._checkversion(out)
1201 if versionstatus == 'unknown':
1200 if versionstatus == 'unknown':
1202 self.ui.warn(_('cannot retrieve git version\n'))
1201 self.ui.warn(_('cannot retrieve git version\n'))
1203 elif versionstatus == 'abort':
1202 elif versionstatus == 'abort':
1204 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1203 raise error.Abort(_('git subrepo requires at least 1.6.0 or later'))
1205 elif versionstatus == 'warning':
1204 elif versionstatus == 'warning':
1206 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1205 self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n'))
1207
1206
1208 @staticmethod
1207 @staticmethod
1209 def _gitversion(out):
1208 def _gitversion(out):
1210 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1209 m = re.search(br'^git version (\d+)\.(\d+)\.(\d+)', out)
1211 if m:
1210 if m:
1212 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1211 return (int(m.group(1)), int(m.group(2)), int(m.group(3)))
1213
1212
1214 m = re.search(br'^git version (\d+)\.(\d+)', out)
1213 m = re.search(br'^git version (\d+)\.(\d+)', out)
1215 if m:
1214 if m:
1216 return (int(m.group(1)), int(m.group(2)), 0)
1215 return (int(m.group(1)), int(m.group(2)), 0)
1217
1216
1218 return -1
1217 return -1
1219
1218
1220 @staticmethod
1219 @staticmethod
1221 def _checkversion(out):
1220 def _checkversion(out):
1222 '''ensure git version is new enough
1221 '''ensure git version is new enough
1223
1222
1224 >>> _checkversion = gitsubrepo._checkversion
1223 >>> _checkversion = gitsubrepo._checkversion
1225 >>> _checkversion(b'git version 1.6.0')
1224 >>> _checkversion(b'git version 1.6.0')
1226 'ok'
1225 'ok'
1227 >>> _checkversion(b'git version 1.8.5')
1226 >>> _checkversion(b'git version 1.8.5')
1228 'ok'
1227 'ok'
1229 >>> _checkversion(b'git version 1.4.0')
1228 >>> _checkversion(b'git version 1.4.0')
1230 'abort'
1229 'abort'
1231 >>> _checkversion(b'git version 1.5.0')
1230 >>> _checkversion(b'git version 1.5.0')
1232 'warning'
1231 'warning'
1233 >>> _checkversion(b'git version 1.9-rc0')
1232 >>> _checkversion(b'git version 1.9-rc0')
1234 'ok'
1233 'ok'
1235 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1234 >>> _checkversion(b'git version 1.9.0.265.g81cdec2')
1236 'ok'
1235 'ok'
1237 >>> _checkversion(b'git version 1.9.0.GIT')
1236 >>> _checkversion(b'git version 1.9.0.GIT')
1238 'ok'
1237 'ok'
1239 >>> _checkversion(b'git version 12345')
1238 >>> _checkversion(b'git version 12345')
1240 'unknown'
1239 'unknown'
1241 >>> _checkversion(b'no')
1240 >>> _checkversion(b'no')
1242 'unknown'
1241 'unknown'
1243 '''
1242 '''
1244 version = gitsubrepo._gitversion(out)
1243 version = gitsubrepo._gitversion(out)
1245 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1244 # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
1246 # despite the docstring comment. For now, error on 1.4.0, warn on
1245 # despite the docstring comment. For now, error on 1.4.0, warn on
1247 # 1.5.0 but attempt to continue.
1246 # 1.5.0 but attempt to continue.
1248 if version == -1:
1247 if version == -1:
1249 return 'unknown'
1248 return 'unknown'
1250 if version < (1, 5, 0):
1249 if version < (1, 5, 0):
1251 return 'abort'
1250 return 'abort'
1252 elif version < (1, 6, 0):
1251 elif version < (1, 6, 0):
1253 return 'warning'
1252 return 'warning'
1254 return 'ok'
1253 return 'ok'
1255
1254
1256 def _gitcommand(self, commands, env=None, stream=False):
1255 def _gitcommand(self, commands, env=None, stream=False):
1257 return self._gitdir(commands, env=env, stream=stream)[0]
1256 return self._gitdir(commands, env=env, stream=stream)[0]
1258
1257
1259 def _gitdir(self, commands, env=None, stream=False):
1258 def _gitdir(self, commands, env=None, stream=False):
1260 return self._gitnodir(commands, env=env, stream=stream,
1259 return self._gitnodir(commands, env=env, stream=stream,
1261 cwd=self._abspath)
1260 cwd=self._abspath)
1262
1261
1263 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1262 def _gitnodir(self, commands, env=None, stream=False, cwd=None):
1264 """Calls the git command
1263 """Calls the git command
1265
1264
1266 The methods tries to call the git command. versions prior to 1.6.0
1265 The methods tries to call the git command. versions prior to 1.6.0
1267 are not supported and very probably fail.
1266 are not supported and very probably fail.
1268 """
1267 """
1269 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1268 self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands)))
1270 if env is None:
1269 if env is None:
1271 env = encoding.environ.copy()
1270 env = encoding.environ.copy()
1272 # disable localization for Git output (issue5176)
1271 # disable localization for Git output (issue5176)
1273 env['LC_ALL'] = 'C'
1272 env['LC_ALL'] = 'C'
1274 # fix for Git CVE-2015-7545
1273 # fix for Git CVE-2015-7545
1275 if 'GIT_ALLOW_PROTOCOL' not in env:
1274 if 'GIT_ALLOW_PROTOCOL' not in env:
1276 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1275 env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh'
1277 # unless ui.quiet is set, print git's stderr,
1276 # unless ui.quiet is set, print git's stderr,
1278 # which is mostly progress and useful info
1277 # which is mostly progress and useful info
1279 errpipe = None
1278 errpipe = None
1280 if self.ui.quiet:
1279 if self.ui.quiet:
1281 errpipe = open(os.devnull, 'w')
1280 errpipe = open(os.devnull, 'w')
1282 if self.ui._colormode and len(commands) and commands[0] == "diff":
1281 if self.ui._colormode and len(commands) and commands[0] == "diff":
1283 # insert the argument in the front,
1282 # insert the argument in the front,
1284 # the end of git diff arguments is used for paths
1283 # the end of git diff arguments is used for paths
1285 commands.insert(1, '--color')
1284 commands.insert(1, '--color')
1286 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
1285 p = subprocess.Popen(pycompat.rapply(procutil.tonativestr,
1287 [self._gitexecutable] + commands),
1286 [self._gitexecutable] + commands),
1288 bufsize=-1,
1287 bufsize=-1,
1289 cwd=pycompat.rapply(procutil.tonativestr, cwd),
1288 cwd=pycompat.rapply(procutil.tonativestr, cwd),
1290 env=procutil.tonativeenv(env),
1289 env=procutil.tonativeenv(env),
1291 close_fds=procutil.closefds,
1290 close_fds=procutil.closefds,
1292 stdout=subprocess.PIPE, stderr=errpipe)
1291 stdout=subprocess.PIPE, stderr=errpipe)
1293 if stream:
1292 if stream:
1294 return p.stdout, None
1293 return p.stdout, None
1295
1294
1296 retdata = p.stdout.read().strip()
1295 retdata = p.stdout.read().strip()
1297 # wait for the child to exit to avoid race condition.
1296 # wait for the child to exit to avoid race condition.
1298 p.wait()
1297 p.wait()
1299
1298
1300 if p.returncode != 0 and p.returncode != 1:
1299 if p.returncode != 0 and p.returncode != 1:
1301 # there are certain error codes that are ok
1300 # there are certain error codes that are ok
1302 command = commands[0]
1301 command = commands[0]
1303 if command in ('cat-file', 'symbolic-ref'):
1302 if command in ('cat-file', 'symbolic-ref'):
1304 return retdata, p.returncode
1303 return retdata, p.returncode
1305 # for all others, abort
1304 # for all others, abort
1306 raise error.Abort(_('git %s error %d in %s') %
1305 raise error.Abort(_('git %s error %d in %s') %
1307 (command, p.returncode, self._relpath))
1306 (command, p.returncode, self._relpath))
1308
1307
1309 return retdata, p.returncode
1308 return retdata, p.returncode
1310
1309
1311 def _gitmissing(self):
1310 def _gitmissing(self):
1312 return not self.wvfs.exists('.git')
1311 return not self.wvfs.exists('.git')
1313
1312
1314 def _gitstate(self):
1313 def _gitstate(self):
1315 return self._gitcommand(['rev-parse', 'HEAD'])
1314 return self._gitcommand(['rev-parse', 'HEAD'])
1316
1315
1317 def _gitcurrentbranch(self):
1316 def _gitcurrentbranch(self):
1318 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1317 current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet'])
1319 if err:
1318 if err:
1320 current = None
1319 current = None
1321 return current
1320 return current
1322
1321
1323 def _gitremote(self, remote):
1322 def _gitremote(self, remote):
1324 out = self._gitcommand(['remote', 'show', '-n', remote])
1323 out = self._gitcommand(['remote', 'show', '-n', remote])
1325 line = out.split('\n')[1]
1324 line = out.split('\n')[1]
1326 i = line.index('URL: ') + len('URL: ')
1325 i = line.index('URL: ') + len('URL: ')
1327 return line[i:]
1326 return line[i:]
1328
1327
1329 def _githavelocally(self, revision):
1328 def _githavelocally(self, revision):
1330 out, code = self._gitdir(['cat-file', '-e', revision])
1329 out, code = self._gitdir(['cat-file', '-e', revision])
1331 return code == 0
1330 return code == 0
1332
1331
1333 def _gitisancestor(self, r1, r2):
1332 def _gitisancestor(self, r1, r2):
1334 base = self._gitcommand(['merge-base', r1, r2])
1333 base = self._gitcommand(['merge-base', r1, r2])
1335 return base == r1
1334 return base == r1
1336
1335
1337 def _gitisbare(self):
1336 def _gitisbare(self):
1338 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1337 return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
1339
1338
1340 def _gitupdatestat(self):
1339 def _gitupdatestat(self):
1341 """This must be run before git diff-index.
1340 """This must be run before git diff-index.
1342 diff-index only looks at changes to file stat;
1341 diff-index only looks at changes to file stat;
1343 this command looks at file contents and updates the stat."""
1342 this command looks at file contents and updates the stat."""
1344 self._gitcommand(['update-index', '-q', '--refresh'])
1343 self._gitcommand(['update-index', '-q', '--refresh'])
1345
1344
1346 def _gitbranchmap(self):
1345 def _gitbranchmap(self):
1347 '''returns 2 things:
1346 '''returns 2 things:
1348 a map from git branch to revision
1347 a map from git branch to revision
1349 a map from revision to branches'''
1348 a map from revision to branches'''
1350 branch2rev = {}
1349 branch2rev = {}
1351 rev2branch = {}
1350 rev2branch = {}
1352
1351
1353 out = self._gitcommand(['for-each-ref', '--format',
1352 out = self._gitcommand(['for-each-ref', '--format',
1354 '%(objectname) %(refname)'])
1353 '%(objectname) %(refname)'])
1355 for line in out.split('\n'):
1354 for line in out.split('\n'):
1356 revision, ref = line.split(' ')
1355 revision, ref = line.split(' ')
1357 if (not ref.startswith('refs/heads/') and
1356 if (not ref.startswith('refs/heads/') and
1358 not ref.startswith('refs/remotes/')):
1357 not ref.startswith('refs/remotes/')):
1359 continue
1358 continue
1360 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1359 if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'):
1361 continue # ignore remote/HEAD redirects
1360 continue # ignore remote/HEAD redirects
1362 branch2rev[ref] = revision
1361 branch2rev[ref] = revision
1363 rev2branch.setdefault(revision, []).append(ref)
1362 rev2branch.setdefault(revision, []).append(ref)
1364 return branch2rev, rev2branch
1363 return branch2rev, rev2branch
1365
1364
1366 def _gittracking(self, branches):
1365 def _gittracking(self, branches):
1367 'return map of remote branch to local tracking branch'
1366 'return map of remote branch to local tracking branch'
1368 # assumes no more than one local tracking branch for each remote
1367 # assumes no more than one local tracking branch for each remote
1369 tracking = {}
1368 tracking = {}
1370 for b in branches:
1369 for b in branches:
1371 if b.startswith('refs/remotes/'):
1370 if b.startswith('refs/remotes/'):
1372 continue
1371 continue
1373 bname = b.split('/', 2)[2]
1372 bname = b.split('/', 2)[2]
1374 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1373 remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
1375 if remote:
1374 if remote:
1376 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1375 ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
1377 tracking['refs/remotes/%s/%s' %
1376 tracking['refs/remotes/%s/%s' %
1378 (remote, ref.split('/', 2)[2])] = b
1377 (remote, ref.split('/', 2)[2])] = b
1379 return tracking
1378 return tracking
1380
1379
1381 def _abssource(self, source):
1380 def _abssource(self, source):
1382 if '://' not in source:
1381 if '://' not in source:
1383 # recognize the scp syntax as an absolute source
1382 # recognize the scp syntax as an absolute source
1384 colon = source.find(':')
1383 colon = source.find(':')
1385 if colon != -1 and '/' not in source[:colon]:
1384 if colon != -1 and '/' not in source[:colon]:
1386 return source
1385 return source
1387 self._subsource = source
1386 self._subsource = source
1388 return _abssource(self)
1387 return _abssource(self)
1389
1388
1390 def _fetch(self, source, revision):
1389 def _fetch(self, source, revision):
1391 if self._gitmissing():
1390 if self._gitmissing():
1392 # SEC: check for safe ssh url
1391 # SEC: check for safe ssh url
1393 util.checksafessh(source)
1392 util.checksafessh(source)
1394
1393
1395 source = self._abssource(source)
1394 source = self._abssource(source)
1396 self.ui.status(_('cloning subrepo %s from %s\n') %
1395 self.ui.status(_('cloning subrepo %s from %s\n') %
1397 (self._relpath, source))
1396 (self._relpath, source))
1398 self._gitnodir(['clone', source, self._abspath])
1397 self._gitnodir(['clone', source, self._abspath])
1399 if self._githavelocally(revision):
1398 if self._githavelocally(revision):
1400 return
1399 return
1401 self.ui.status(_('pulling subrepo %s from %s\n') %
1400 self.ui.status(_('pulling subrepo %s from %s\n') %
1402 (self._relpath, self._gitremote('origin')))
1401 (self._relpath, self._gitremote('origin')))
1403 # try only origin: the originally cloned repo
1402 # try only origin: the originally cloned repo
1404 self._gitcommand(['fetch'])
1403 self._gitcommand(['fetch'])
1405 if not self._githavelocally(revision):
1404 if not self._githavelocally(revision):
1406 raise error.Abort(_('revision %s does not exist in subrepository '
1405 raise error.Abort(_('revision %s does not exist in subrepository '
1407 '"%s"\n') % (revision, self._relpath))
1406 '"%s"\n') % (revision, self._relpath))
1408
1407
1409 @annotatesubrepoerror
1408 @annotatesubrepoerror
1410 def dirty(self, ignoreupdate=False, missing=False):
1409 def dirty(self, ignoreupdate=False, missing=False):
1411 if self._gitmissing():
1410 if self._gitmissing():
1412 return self._state[1] != ''
1411 return self._state[1] != ''
1413 if self._gitisbare():
1412 if self._gitisbare():
1414 return True
1413 return True
1415 if not ignoreupdate and self._state[1] != self._gitstate():
1414 if not ignoreupdate and self._state[1] != self._gitstate():
1416 # different version checked out
1415 # different version checked out
1417 return True
1416 return True
1418 # check for staged changes or modified files; ignore untracked files
1417 # check for staged changes or modified files; ignore untracked files
1419 self._gitupdatestat()
1418 self._gitupdatestat()
1420 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1419 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1421 return code == 1
1420 return code == 1
1422
1421
1423 def basestate(self):
1422 def basestate(self):
1424 return self._gitstate()
1423 return self._gitstate()
1425
1424
1426 @annotatesubrepoerror
1425 @annotatesubrepoerror
1427 def get(self, state, overwrite=False):
1426 def get(self, state, overwrite=False):
1428 source, revision, kind = state
1427 source, revision, kind = state
1429 if not revision:
1428 if not revision:
1430 self.remove()
1429 self.remove()
1431 return
1430 return
1432 self._fetch(source, revision)
1431 self._fetch(source, revision)
1433 # if the repo was set to be bare, unbare it
1432 # if the repo was set to be bare, unbare it
1434 if self._gitisbare():
1433 if self._gitisbare():
1435 self._gitcommand(['config', 'core.bare', 'false'])
1434 self._gitcommand(['config', 'core.bare', 'false'])
1436 if self._gitstate() == revision:
1435 if self._gitstate() == revision:
1437 self._gitcommand(['reset', '--hard', 'HEAD'])
1436 self._gitcommand(['reset', '--hard', 'HEAD'])
1438 return
1437 return
1439 elif self._gitstate() == revision:
1438 elif self._gitstate() == revision:
1440 if overwrite:
1439 if overwrite:
1441 # first reset the index to unmark new files for commit, because
1440 # first reset the index to unmark new files for commit, because
1442 # reset --hard will otherwise throw away files added for commit,
1441 # reset --hard will otherwise throw away files added for commit,
1443 # not just unmark them.
1442 # not just unmark them.
1444 self._gitcommand(['reset', 'HEAD'])
1443 self._gitcommand(['reset', 'HEAD'])
1445 self._gitcommand(['reset', '--hard', 'HEAD'])
1444 self._gitcommand(['reset', '--hard', 'HEAD'])
1446 return
1445 return
1447 branch2rev, rev2branch = self._gitbranchmap()
1446 branch2rev, rev2branch = self._gitbranchmap()
1448
1447
1449 def checkout(args):
1448 def checkout(args):
1450 cmd = ['checkout']
1449 cmd = ['checkout']
1451 if overwrite:
1450 if overwrite:
1452 # first reset the index to unmark new files for commit, because
1451 # first reset the index to unmark new files for commit, because
1453 # the -f option will otherwise throw away files added for
1452 # the -f option will otherwise throw away files added for
1454 # commit, not just unmark them.
1453 # commit, not just unmark them.
1455 self._gitcommand(['reset', 'HEAD'])
1454 self._gitcommand(['reset', 'HEAD'])
1456 cmd.append('-f')
1455 cmd.append('-f')
1457 self._gitcommand(cmd + args)
1456 self._gitcommand(cmd + args)
1458 _sanitize(self.ui, self.wvfs, '.git')
1457 _sanitize(self.ui, self.wvfs, '.git')
1459
1458
1460 def rawcheckout():
1459 def rawcheckout():
1461 # no branch to checkout, check it out with no branch
1460 # no branch to checkout, check it out with no branch
1462 self.ui.warn(_('checking out detached HEAD in '
1461 self.ui.warn(_('checking out detached HEAD in '
1463 'subrepository "%s"\n') % self._relpath)
1462 'subrepository "%s"\n') % self._relpath)
1464 self.ui.warn(_('check out a git branch if you intend '
1463 self.ui.warn(_('check out a git branch if you intend '
1465 'to make changes\n'))
1464 'to make changes\n'))
1466 checkout(['-q', revision])
1465 checkout(['-q', revision])
1467
1466
1468 if revision not in rev2branch:
1467 if revision not in rev2branch:
1469 rawcheckout()
1468 rawcheckout()
1470 return
1469 return
1471 branches = rev2branch[revision]
1470 branches = rev2branch[revision]
1472 firstlocalbranch = None
1471 firstlocalbranch = None
1473 for b in branches:
1472 for b in branches:
1474 if b == 'refs/heads/master':
1473 if b == 'refs/heads/master':
1475 # master trumps all other branches
1474 # master trumps all other branches
1476 checkout(['refs/heads/master'])
1475 checkout(['refs/heads/master'])
1477 return
1476 return
1478 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1477 if not firstlocalbranch and not b.startswith('refs/remotes/'):
1479 firstlocalbranch = b
1478 firstlocalbranch = b
1480 if firstlocalbranch:
1479 if firstlocalbranch:
1481 checkout([firstlocalbranch])
1480 checkout([firstlocalbranch])
1482 return
1481 return
1483
1482
1484 tracking = self._gittracking(branch2rev.keys())
1483 tracking = self._gittracking(branch2rev.keys())
1485 # choose a remote branch already tracked if possible
1484 # choose a remote branch already tracked if possible
1486 remote = branches[0]
1485 remote = branches[0]
1487 if remote not in tracking:
1486 if remote not in tracking:
1488 for b in branches:
1487 for b in branches:
1489 if b in tracking:
1488 if b in tracking:
1490 remote = b
1489 remote = b
1491 break
1490 break
1492
1491
1493 if remote not in tracking:
1492 if remote not in tracking:
1494 # create a new local tracking branch
1493 # create a new local tracking branch
1495 local = remote.split('/', 3)[3]
1494 local = remote.split('/', 3)[3]
1496 checkout(['-b', local, remote])
1495 checkout(['-b', local, remote])
1497 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1496 elif self._gitisancestor(branch2rev[tracking[remote]], remote):
1498 # When updating to a tracked remote branch,
1497 # When updating to a tracked remote branch,
1499 # if the local tracking branch is downstream of it,
1498 # if the local tracking branch is downstream of it,
1500 # a normal `git pull` would have performed a "fast-forward merge"
1499 # a normal `git pull` would have performed a "fast-forward merge"
1501 # which is equivalent to updating the local branch to the remote.
1500 # which is equivalent to updating the local branch to the remote.
1502 # Since we are only looking at branching at update, we need to
1501 # Since we are only looking at branching at update, we need to
1503 # detect this situation and perform this action lazily.
1502 # detect this situation and perform this action lazily.
1504 if tracking[remote] != self._gitcurrentbranch():
1503 if tracking[remote] != self._gitcurrentbranch():
1505 checkout([tracking[remote]])
1504 checkout([tracking[remote]])
1506 self._gitcommand(['merge', '--ff', remote])
1505 self._gitcommand(['merge', '--ff', remote])
1507 _sanitize(self.ui, self.wvfs, '.git')
1506 _sanitize(self.ui, self.wvfs, '.git')
1508 else:
1507 else:
1509 # a real merge would be required, just checkout the revision
1508 # a real merge would be required, just checkout the revision
1510 rawcheckout()
1509 rawcheckout()
1511
1510
1512 @annotatesubrepoerror
1511 @annotatesubrepoerror
1513 def commit(self, text, user, date):
1512 def commit(self, text, user, date):
1514 if self._gitmissing():
1513 if self._gitmissing():
1515 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1514 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1516 cmd = ['commit', '-a', '-m', text]
1515 cmd = ['commit', '-a', '-m', text]
1517 env = encoding.environ.copy()
1516 env = encoding.environ.copy()
1518 if user:
1517 if user:
1519 cmd += ['--author', user]
1518 cmd += ['--author', user]
1520 if date:
1519 if date:
1521 # git's date parser silently ignores when seconds < 1e9
1520 # git's date parser silently ignores when seconds < 1e9
1522 # convert to ISO8601
1521 # convert to ISO8601
1523 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1522 env['GIT_AUTHOR_DATE'] = dateutil.datestr(date,
1524 '%Y-%m-%dT%H:%M:%S %1%2')
1523 '%Y-%m-%dT%H:%M:%S %1%2')
1525 self._gitcommand(cmd, env=env)
1524 self._gitcommand(cmd, env=env)
1526 # make sure commit works otherwise HEAD might not exist under certain
1525 # make sure commit works otherwise HEAD might not exist under certain
1527 # circumstances
1526 # circumstances
1528 return self._gitstate()
1527 return self._gitstate()
1529
1528
1530 @annotatesubrepoerror
1529 @annotatesubrepoerror
1531 def merge(self, state):
1530 def merge(self, state):
1532 source, revision, kind = state
1531 source, revision, kind = state
1533 self._fetch(source, revision)
1532 self._fetch(source, revision)
1534 base = self._gitcommand(['merge-base', revision, self._state[1]])
1533 base = self._gitcommand(['merge-base', revision, self._state[1]])
1535 self._gitupdatestat()
1534 self._gitupdatestat()
1536 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1535 out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
1537
1536
1538 def mergefunc():
1537 def mergefunc():
1539 if base == revision:
1538 if base == revision:
1540 self.get(state) # fast forward merge
1539 self.get(state) # fast forward merge
1541 elif base != self._state[1]:
1540 elif base != self._state[1]:
1542 self._gitcommand(['merge', '--no-commit', revision])
1541 self._gitcommand(['merge', '--no-commit', revision])
1543 _sanitize(self.ui, self.wvfs, '.git')
1542 _sanitize(self.ui, self.wvfs, '.git')
1544
1543
1545 if self.dirty():
1544 if self.dirty():
1546 if self._gitstate() != revision:
1545 if self._gitstate() != revision:
1547 dirty = self._gitstate() == self._state[1] or code != 0
1546 dirty = self._gitstate() == self._state[1] or code != 0
1548 if _updateprompt(self.ui, self, dirty,
1547 if _updateprompt(self.ui, self, dirty,
1549 self._state[1][:7], revision[:7]):
1548 self._state[1][:7], revision[:7]):
1550 mergefunc()
1549 mergefunc()
1551 else:
1550 else:
1552 mergefunc()
1551 mergefunc()
1553
1552
1554 @annotatesubrepoerror
1553 @annotatesubrepoerror
1555 def push(self, opts):
1554 def push(self, opts):
1556 force = opts.get('force')
1555 force = opts.get('force')
1557
1556
1558 if not self._state[1]:
1557 if not self._state[1]:
1559 return True
1558 return True
1560 if self._gitmissing():
1559 if self._gitmissing():
1561 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1560 raise error.Abort(_("subrepo %s is missing") % self._relpath)
1562 # if a branch in origin contains the revision, nothing to do
1561 # if a branch in origin contains the revision, nothing to do
1563 branch2rev, rev2branch = self._gitbranchmap()
1562 branch2rev, rev2branch = self._gitbranchmap()
1564 if self._state[1] in rev2branch:
1563 if self._state[1] in rev2branch:
1565 for b in rev2branch[self._state[1]]:
1564 for b in rev2branch[self._state[1]]:
1566 if b.startswith('refs/remotes/origin/'):
1565 if b.startswith('refs/remotes/origin/'):
1567 return True
1566 return True
1568 for b, revision in branch2rev.iteritems():
1567 for b, revision in branch2rev.iteritems():
1569 if b.startswith('refs/remotes/origin/'):
1568 if b.startswith('refs/remotes/origin/'):
1570 if self._gitisancestor(self._state[1], revision):
1569 if self._gitisancestor(self._state[1], revision):
1571 return True
1570 return True
1572 # otherwise, try to push the currently checked out branch
1571 # otherwise, try to push the currently checked out branch
1573 cmd = ['push']
1572 cmd = ['push']
1574 if force:
1573 if force:
1575 cmd.append('--force')
1574 cmd.append('--force')
1576
1575
1577 current = self._gitcurrentbranch()
1576 current = self._gitcurrentbranch()
1578 if current:
1577 if current:
1579 # determine if the current branch is even useful
1578 # determine if the current branch is even useful
1580 if not self._gitisancestor(self._state[1], current):
1579 if not self._gitisancestor(self._state[1], current):
1581 self.ui.warn(_('unrelated git branch checked out '
1580 self.ui.warn(_('unrelated git branch checked out '
1582 'in subrepository "%s"\n') % self._relpath)
1581 'in subrepository "%s"\n') % self._relpath)
1583 return False
1582 return False
1584 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1583 self.ui.status(_('pushing branch %s of subrepository "%s"\n') %
1585 (current.split('/', 2)[2], self._relpath))
1584 (current.split('/', 2)[2], self._relpath))
1586 ret = self._gitdir(cmd + ['origin', current])
1585 ret = self._gitdir(cmd + ['origin', current])
1587 return ret[1] == 0
1586 return ret[1] == 0
1588 else:
1587 else:
1589 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1588 self.ui.warn(_('no branch checked out in subrepository "%s"\n'
1590 'cannot push revision %s\n') %
1589 'cannot push revision %s\n') %
1591 (self._relpath, self._state[1]))
1590 (self._relpath, self._state[1]))
1592 return False
1591 return False
1593
1592
1594 @annotatesubrepoerror
1593 @annotatesubrepoerror
1595 def add(self, ui, match, prefix, explicitonly, **opts):
1594 def add(self, ui, match, prefix, explicitonly, **opts):
1596 if self._gitmissing():
1595 if self._gitmissing():
1597 return []
1596 return []
1598
1597
1599 s = self.status(None, unknown=True, clean=True)
1598 s = self.status(None, unknown=True, clean=True)
1600
1599
1601 tracked = set()
1600 tracked = set()
1602 # dirstates 'amn' warn, 'r' is added again
1601 # dirstates 'amn' warn, 'r' is added again
1603 for l in (s.modified, s.added, s.deleted, s.clean):
1602 for l in (s.modified, s.added, s.deleted, s.clean):
1604 tracked.update(l)
1603 tracked.update(l)
1605
1604
1606 # Unknown files not of interest will be rejected by the matcher
1605 # Unknown files not of interest will be rejected by the matcher
1607 files = s.unknown
1606 files = s.unknown
1608 files.extend(match.files())
1607 files.extend(match.files())
1609
1608
1610 rejected = []
1609 rejected = []
1611
1610
1612 files = [f for f in sorted(set(files)) if match(f)]
1611 files = [f for f in sorted(set(files)) if match(f)]
1613 for f in files:
1612 for f in files:
1614 exact = match.exact(f)
1613 exact = match.exact(f)
1615 command = ["add"]
1614 command = ["add"]
1616 if exact:
1615 if exact:
1617 command.append("-f") #should be added, even if ignored
1616 command.append("-f") #should be added, even if ignored
1618 if ui.verbose or not exact:
1617 if ui.verbose or not exact:
1619 ui.status(_('adding %s\n') % match.rel(f))
1618 ui.status(_('adding %s\n') % match.rel(f))
1620
1619
1621 if f in tracked: # hg prints 'adding' even if already tracked
1620 if f in tracked: # hg prints 'adding' even if already tracked
1622 if exact:
1621 if exact:
1623 rejected.append(f)
1622 rejected.append(f)
1624 continue
1623 continue
1625 if not opts.get(r'dry_run'):
1624 if not opts.get(r'dry_run'):
1626 self._gitcommand(command + [f])
1625 self._gitcommand(command + [f])
1627
1626
1628 for f in rejected:
1627 for f in rejected:
1629 ui.warn(_("%s already tracked!\n") % match.abs(f))
1628 ui.warn(_("%s already tracked!\n") % match.abs(f))
1630
1629
1631 return rejected
1630 return rejected
1632
1631
1633 @annotatesubrepoerror
1632 @annotatesubrepoerror
1634 def remove(self):
1633 def remove(self):
1635 if self._gitmissing():
1634 if self._gitmissing():
1636 return
1635 return
1637 if self.dirty():
1636 if self.dirty():
1638 self.ui.warn(_('not removing repo %s because '
1637 self.ui.warn(_('not removing repo %s because '
1639 'it has changes.\n') % self._relpath)
1638 'it has changes.\n') % self._relpath)
1640 return
1639 return
1641 # we can't fully delete the repository as it may contain
1640 # we can't fully delete the repository as it may contain
1642 # local-only history
1641 # local-only history
1643 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1642 self.ui.note(_('removing subrepo %s\n') % self._relpath)
1644 self._gitcommand(['config', 'core.bare', 'true'])
1643 self._gitcommand(['config', 'core.bare', 'true'])
1645 for f, kind in self.wvfs.readdir():
1644 for f, kind in self.wvfs.readdir():
1646 if f == '.git':
1645 if f == '.git':
1647 continue
1646 continue
1648 if kind == stat.S_IFDIR:
1647 if kind == stat.S_IFDIR:
1649 self.wvfs.rmtree(f)
1648 self.wvfs.rmtree(f)
1650 else:
1649 else:
1651 self.wvfs.unlink(f)
1650 self.wvfs.unlink(f)
1652
1651
1653 def archive(self, archiver, prefix, match=None, decode=True):
1652 def archive(self, archiver, prefix, match=None, decode=True):
1654 total = 0
1653 total = 0
1655 source, revision = self._state
1654 source, revision = self._state
1656 if not revision:
1655 if not revision:
1657 return total
1656 return total
1658 self._fetch(source, revision)
1657 self._fetch(source, revision)
1659
1658
1660 # Parse git's native archive command.
1659 # Parse git's native archive command.
1661 # This should be much faster than manually traversing the trees
1660 # This should be much faster than manually traversing the trees
1662 # and objects with many subprocess calls.
1661 # and objects with many subprocess calls.
1663 tarstream = self._gitcommand(['archive', revision], stream=True)
1662 tarstream = self._gitcommand(['archive', revision], stream=True)
1664 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1663 tar = tarfile.open(fileobj=tarstream, mode=r'r|')
1665 relpath = subrelpath(self)
1664 relpath = subrelpath(self)
1666 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1665 progress = self.ui.makeprogress(_('archiving (%s)') % relpath,
1667 unit=_('files'))
1666 unit=_('files'))
1668 progress.update(0)
1667 progress.update(0)
1669 for info in tar:
1668 for info in tar:
1670 if info.isdir():
1669 if info.isdir():
1671 continue
1670 continue
1672 bname = pycompat.fsencode(info.name)
1671 bname = pycompat.fsencode(info.name)
1673 if match and not match(bname):
1672 if match and not match(bname):
1674 continue
1673 continue
1675 if info.issym():
1674 if info.issym():
1676 data = info.linkname
1675 data = info.linkname
1677 else:
1676 else:
1678 data = tar.extractfile(info).read()
1677 data = tar.extractfile(info).read()
1679 archiver.addfile(prefix + self._path + '/' + bname,
1678 archiver.addfile(prefix + self._path + '/' + bname,
1680 info.mode, info.issym(), data)
1679 info.mode, info.issym(), data)
1681 total += 1
1680 total += 1
1682 progress.increment()
1681 progress.increment()
1683 progress.complete()
1682 progress.complete()
1684 return total
1683 return total
1685
1684
1686
1685
1687 @annotatesubrepoerror
1686 @annotatesubrepoerror
1688 def cat(self, match, fm, fntemplate, prefix, **opts):
1687 def cat(self, match, fm, fntemplate, prefix, **opts):
1689 rev = self._state[1]
1688 rev = self._state[1]
1690 if match.anypats():
1689 if match.anypats():
1691 return 1 #No support for include/exclude yet
1690 return 1 #No support for include/exclude yet
1692
1691
1693 if not match.files():
1692 if not match.files():
1694 return 1
1693 return 1
1695
1694
1696 # TODO: add support for non-plain formatter (see cmdutil.cat())
1695 # TODO: add support for non-plain formatter (see cmdutil.cat())
1697 for f in match.files():
1696 for f in match.files():
1698 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1697 output = self._gitcommand(["show", "%s:%s" % (rev, f)])
1699 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1698 fp = cmdutil.makefileobj(self._ctx, fntemplate,
1700 pathname=self.wvfs.reljoin(prefix, f))
1699 pathname=self.wvfs.reljoin(prefix, f))
1701 fp.write(output)
1700 fp.write(output)
1702 fp.close()
1701 fp.close()
1703 return 0
1702 return 0
1704
1703
1705
1704
1706 @annotatesubrepoerror
1705 @annotatesubrepoerror
1707 def status(self, rev2, **opts):
1706 def status(self, rev2, **opts):
1708 rev1 = self._state[1]
1707 rev1 = self._state[1]
1709 if self._gitmissing() or not rev1:
1708 if self._gitmissing() or not rev1:
1710 # if the repo is missing, return no results
1709 # if the repo is missing, return no results
1711 return scmutil.status([], [], [], [], [], [], [])
1710 return scmutil.status([], [], [], [], [], [], [])
1712 modified, added, removed = [], [], []
1711 modified, added, removed = [], [], []
1713 self._gitupdatestat()
1712 self._gitupdatestat()
1714 if rev2:
1713 if rev2:
1715 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1714 command = ['diff-tree', '--no-renames', '-r', rev1, rev2]
1716 else:
1715 else:
1717 command = ['diff-index', '--no-renames', rev1]
1716 command = ['diff-index', '--no-renames', rev1]
1718 out = self._gitcommand(command)
1717 out = self._gitcommand(command)
1719 for line in out.split('\n'):
1718 for line in out.split('\n'):
1720 tab = line.find('\t')
1719 tab = line.find('\t')
1721 if tab == -1:
1720 if tab == -1:
1722 continue
1721 continue
1723 status, f = line[tab - 1:tab], line[tab + 1:]
1722 status, f = line[tab - 1:tab], line[tab + 1:]
1724 if status == 'M':
1723 if status == 'M':
1725 modified.append(f)
1724 modified.append(f)
1726 elif status == 'A':
1725 elif status == 'A':
1727 added.append(f)
1726 added.append(f)
1728 elif status == 'D':
1727 elif status == 'D':
1729 removed.append(f)
1728 removed.append(f)
1730
1729
1731 deleted, unknown, ignored, clean = [], [], [], []
1730 deleted, unknown, ignored, clean = [], [], [], []
1732
1731
1733 command = ['status', '--porcelain', '-z']
1732 command = ['status', '--porcelain', '-z']
1734 if opts.get(r'unknown'):
1733 if opts.get(r'unknown'):
1735 command += ['--untracked-files=all']
1734 command += ['--untracked-files=all']
1736 if opts.get(r'ignored'):
1735 if opts.get(r'ignored'):
1737 command += ['--ignored']
1736 command += ['--ignored']
1738 out = self._gitcommand(command)
1737 out = self._gitcommand(command)
1739
1738
1740 changedfiles = set()
1739 changedfiles = set()
1741 changedfiles.update(modified)
1740 changedfiles.update(modified)
1742 changedfiles.update(added)
1741 changedfiles.update(added)
1743 changedfiles.update(removed)
1742 changedfiles.update(removed)
1744 for line in out.split('\0'):
1743 for line in out.split('\0'):
1745 if not line:
1744 if not line:
1746 continue
1745 continue
1747 st = line[0:2]
1746 st = line[0:2]
1748 #moves and copies show 2 files on one line
1747 #moves and copies show 2 files on one line
1749 if line.find('\0') >= 0:
1748 if line.find('\0') >= 0:
1750 filename1, filename2 = line[3:].split('\0')
1749 filename1, filename2 = line[3:].split('\0')
1751 else:
1750 else:
1752 filename1 = line[3:]
1751 filename1 = line[3:]
1753 filename2 = None
1752 filename2 = None
1754
1753
1755 changedfiles.add(filename1)
1754 changedfiles.add(filename1)
1756 if filename2:
1755 if filename2:
1757 changedfiles.add(filename2)
1756 changedfiles.add(filename2)
1758
1757
1759 if st == '??':
1758 if st == '??':
1760 unknown.append(filename1)
1759 unknown.append(filename1)
1761 elif st == '!!':
1760 elif st == '!!':
1762 ignored.append(filename1)
1761 ignored.append(filename1)
1763
1762
1764 if opts.get(r'clean'):
1763 if opts.get(r'clean'):
1765 out = self._gitcommand(['ls-files'])
1764 out = self._gitcommand(['ls-files'])
1766 for f in out.split('\n'):
1765 for f in out.split('\n'):
1767 if not f in changedfiles:
1766 if not f in changedfiles:
1768 clean.append(f)
1767 clean.append(f)
1769
1768
1770 return scmutil.status(modified, added, removed, deleted,
1769 return scmutil.status(modified, added, removed, deleted,
1771 unknown, ignored, clean)
1770 unknown, ignored, clean)
1772
1771
1773 @annotatesubrepoerror
1772 @annotatesubrepoerror
1774 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1773 def diff(self, ui, diffopts, node2, match, prefix, **opts):
1775 node1 = self._state[1]
1774 node1 = self._state[1]
1776 cmd = ['diff', '--no-renames']
1775 cmd = ['diff', '--no-renames']
1777 if opts[r'stat']:
1776 if opts[r'stat']:
1778 cmd.append('--stat')
1777 cmd.append('--stat')
1779 else:
1778 else:
1780 # for Git, this also implies '-p'
1779 # for Git, this also implies '-p'
1781 cmd.append('-U%d' % diffopts.context)
1780 cmd.append('-U%d' % diffopts.context)
1782
1781
1783 gitprefix = self.wvfs.reljoin(prefix, self._path)
1782 gitprefix = self.wvfs.reljoin(prefix, self._path)
1784
1783
1785 if diffopts.noprefix:
1784 if diffopts.noprefix:
1786 cmd.extend(['--src-prefix=%s/' % gitprefix,
1785 cmd.extend(['--src-prefix=%s/' % gitprefix,
1787 '--dst-prefix=%s/' % gitprefix])
1786 '--dst-prefix=%s/' % gitprefix])
1788 else:
1787 else:
1789 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1788 cmd.extend(['--src-prefix=a/%s/' % gitprefix,
1790 '--dst-prefix=b/%s/' % gitprefix])
1789 '--dst-prefix=b/%s/' % gitprefix])
1791
1790
1792 if diffopts.ignorews:
1791 if diffopts.ignorews:
1793 cmd.append('--ignore-all-space')
1792 cmd.append('--ignore-all-space')
1794 if diffopts.ignorewsamount:
1793 if diffopts.ignorewsamount:
1795 cmd.append('--ignore-space-change')
1794 cmd.append('--ignore-space-change')
1796 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1795 if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \
1797 and diffopts.ignoreblanklines:
1796 and diffopts.ignoreblanklines:
1798 cmd.append('--ignore-blank-lines')
1797 cmd.append('--ignore-blank-lines')
1799
1798
1800 cmd.append(node1)
1799 cmd.append(node1)
1801 if node2:
1800 if node2:
1802 cmd.append(node2)
1801 cmd.append(node2)
1803
1802
1804 output = ""
1803 output = ""
1805 if match.always():
1804 if match.always():
1806 output += self._gitcommand(cmd) + '\n'
1805 output += self._gitcommand(cmd) + '\n'
1807 else:
1806 else:
1808 st = self.status(node2)[:3]
1807 st = self.status(node2)[:3]
1809 files = [f for sublist in st for f in sublist]
1808 files = [f for sublist in st for f in sublist]
1810 for f in files:
1809 for f in files:
1811 if match(f):
1810 if match(f):
1812 output += self._gitcommand(cmd + ['--', f]) + '\n'
1811 output += self._gitcommand(cmd + ['--', f]) + '\n'
1813
1812
1814 if output.strip():
1813 if output.strip():
1815 ui.write(output)
1814 ui.write(output)
1816
1815
1817 @annotatesubrepoerror
1816 @annotatesubrepoerror
1818 def revert(self, substate, *pats, **opts):
1817 def revert(self, substate, *pats, **opts):
1819 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1818 self.ui.status(_('reverting subrepo %s\n') % substate[0])
1820 if not opts.get(r'no_backup'):
1819 if not opts.get(r'no_backup'):
1821 status = self.status(None)
1820 status = self.status(None)
1822 names = status.modified
1821 names = status.modified
1823 for name in names:
1822 for name in names:
1824 # backuppath() expects a path relative to the parent repo (the
1823 # backuppath() expects a path relative to the parent repo (the
1825 # repo that ui.origbackuppath is relative to)
1824 # repo that ui.origbackuppath is relative to)
1826 parentname = os.path.join(self._path, name)
1825 parentname = os.path.join(self._path, name)
1827 bakname = scmutil.backuppath(self.ui, self._subparent,
1826 bakname = scmutil.backuppath(self.ui, self._subparent,
1828 parentname)
1827 parentname)
1829 self.ui.note(_('saving current version of %s as %s\n') %
1828 self.ui.note(_('saving current version of %s as %s\n') %
1830 (name, os.path.relpath(bakname)))
1829 (name, os.path.relpath(bakname)))
1831 util.rename(self.wvfs.join(name), bakname)
1830 util.rename(self.wvfs.join(name), bakname)
1832
1831
1833 if not opts.get(r'dry_run'):
1832 if not opts.get(r'dry_run'):
1834 self.get(substate, overwrite=True)
1833 self.get(substate, overwrite=True)
1835 return []
1834 return []
1836
1835
1837 def shortid(self, revid):
1836 def shortid(self, revid):
1838 return revid[:7]
1837 return revid[:7]
1839
1838
1840 types = {
1839 types = {
1841 'hg': hgsubrepo,
1840 'hg': hgsubrepo,
1842 'svn': svnsubrepo,
1841 'svn': svnsubrepo,
1843 'git': gitsubrepo,
1842 'git': gitsubrepo,
1844 }
1843 }
General Comments 0
You need to be logged in to leave comments. Login now