##// END OF EJS Templates
extensions: add detailed loading information...
Martijn Pieters -
r38834:d5895867 default
parent child Browse files
Show More
@@ -1,766 +1,808 b''
1 # extensions.py - extension handling for mercurial
1 # extensions.py - extension handling for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import ast
10 import ast
11 import collections
11 import collections
12 import functools
12 import functools
13 import imp
13 import imp
14 import inspect
14 import inspect
15 import os
15 import os
16
16
17 from .i18n import (
17 from .i18n import (
18 _,
18 _,
19 gettext,
19 gettext,
20 )
20 )
21
21
22 from . import (
22 from . import (
23 cmdutil,
23 cmdutil,
24 configitems,
24 configitems,
25 error,
25 error,
26 pycompat,
26 pycompat,
27 util,
27 util,
28 )
28 )
29
29
30 from .utils import (
30 from .utils import (
31 stringutil,
31 stringutil,
32 )
32 )
33
33
34 _extensions = {}
34 _extensions = {}
35 _disabledextensions = {}
35 _disabledextensions = {}
36 _aftercallbacks = {}
36 _aftercallbacks = {}
37 _order = []
37 _order = []
38 _builtin = {
38 _builtin = {
39 'hbisect',
39 'hbisect',
40 'bookmarks',
40 'bookmarks',
41 'color',
41 'color',
42 'parentrevspec',
42 'parentrevspec',
43 'progress',
43 'progress',
44 'interhg',
44 'interhg',
45 'inotify',
45 'inotify',
46 'hgcia'
46 'hgcia'
47 }
47 }
48
48
49 def extensions(ui=None):
49 def extensions(ui=None):
50 if ui:
50 if ui:
51 def enabled(name):
51 def enabled(name):
52 for format in ['%s', 'hgext.%s']:
52 for format in ['%s', 'hgext.%s']:
53 conf = ui.config('extensions', format % name)
53 conf = ui.config('extensions', format % name)
54 if conf is not None and not conf.startswith('!'):
54 if conf is not None and not conf.startswith('!'):
55 return True
55 return True
56 else:
56 else:
57 enabled = lambda name: True
57 enabled = lambda name: True
58 for name in _order:
58 for name in _order:
59 module = _extensions[name]
59 module = _extensions[name]
60 if module and enabled(name):
60 if module and enabled(name):
61 yield name, module
61 yield name, module
62
62
63 def find(name):
63 def find(name):
64 '''return module with given extension name'''
64 '''return module with given extension name'''
65 mod = None
65 mod = None
66 try:
66 try:
67 mod = _extensions[name]
67 mod = _extensions[name]
68 except KeyError:
68 except KeyError:
69 for k, v in _extensions.iteritems():
69 for k, v in _extensions.iteritems():
70 if k.endswith('.' + name) or k.endswith('/' + name):
70 if k.endswith('.' + name) or k.endswith('/' + name):
71 mod = v
71 mod = v
72 break
72 break
73 if not mod:
73 if not mod:
74 raise KeyError(name)
74 raise KeyError(name)
75 return mod
75 return mod
76
76
77 def loadpath(path, module_name):
77 def loadpath(path, module_name):
78 module_name = module_name.replace('.', '_')
78 module_name = module_name.replace('.', '_')
79 path = util.normpath(util.expandpath(path))
79 path = util.normpath(util.expandpath(path))
80 module_name = pycompat.fsdecode(module_name)
80 module_name = pycompat.fsdecode(module_name)
81 path = pycompat.fsdecode(path)
81 path = pycompat.fsdecode(path)
82 if os.path.isdir(path):
82 if os.path.isdir(path):
83 # module/__init__.py style
83 # module/__init__.py style
84 d, f = os.path.split(path)
84 d, f = os.path.split(path)
85 fd, fpath, desc = imp.find_module(f, [d])
85 fd, fpath, desc = imp.find_module(f, [d])
86 return imp.load_module(module_name, fd, fpath, desc)
86 return imp.load_module(module_name, fd, fpath, desc)
87 else:
87 else:
88 try:
88 try:
89 return imp.load_source(module_name, path)
89 return imp.load_source(module_name, path)
90 except IOError as exc:
90 except IOError as exc:
91 if not exc.filename:
91 if not exc.filename:
92 exc.filename = path # python does not fill this
92 exc.filename = path # python does not fill this
93 raise
93 raise
94
94
95 def _importh(name):
95 def _importh(name):
96 """import and return the <name> module"""
96 """import and return the <name> module"""
97 mod = __import__(pycompat.sysstr(name))
97 mod = __import__(pycompat.sysstr(name))
98 components = name.split('.')
98 components = name.split('.')
99 for comp in components[1:]:
99 for comp in components[1:]:
100 mod = getattr(mod, comp)
100 mod = getattr(mod, comp)
101 return mod
101 return mod
102
102
103 def _importext(name, path=None, reportfunc=None):
103 def _importext(name, path=None, reportfunc=None):
104 if path:
104 if path:
105 # the module will be loaded in sys.modules
105 # the module will be loaded in sys.modules
106 # choose an unique name so that it doesn't
106 # choose an unique name so that it doesn't
107 # conflicts with other modules
107 # conflicts with other modules
108 mod = loadpath(path, 'hgext.%s' % name)
108 mod = loadpath(path, 'hgext.%s' % name)
109 else:
109 else:
110 try:
110 try:
111 mod = _importh("hgext.%s" % name)
111 mod = _importh("hgext.%s" % name)
112 except ImportError as err:
112 except ImportError as err:
113 if reportfunc:
113 if reportfunc:
114 reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name)
114 reportfunc(err, "hgext.%s" % name, "hgext3rd.%s" % name)
115 try:
115 try:
116 mod = _importh("hgext3rd.%s" % name)
116 mod = _importh("hgext3rd.%s" % name)
117 except ImportError as err:
117 except ImportError as err:
118 if reportfunc:
118 if reportfunc:
119 reportfunc(err, "hgext3rd.%s" % name, name)
119 reportfunc(err, "hgext3rd.%s" % name, name)
120 mod = _importh(name)
120 mod = _importh(name)
121 return mod
121 return mod
122
122
123 def _reportimporterror(ui, err, failed, next):
123 def _reportimporterror(ui, err, failed, next):
124 # note: this ui.debug happens before --debug is processed,
124 # note: this ui.debug happens before --debug is processed,
125 # Use --config ui.debug=1 to see them.
125 # Use --config ui.debug=1 to see them.
126 if ui.configbool('devel', 'debug.extensions'):
126 if ui.configbool('devel', 'debug.extensions'):
127 ui.debug('could not import %s (%s): trying %s\n'
127 ui.debug('debug.extensions: - could not import %s (%s): trying %s\n'
128 % (failed, stringutil.forcebytestr(err), next))
128 % (failed, stringutil.forcebytestr(err), next))
129 if ui.debugflag:
129 if ui.debugflag:
130 ui.traceback()
130 ui.traceback()
131
131
132 def _rejectunicode(name, xs):
132 def _rejectunicode(name, xs):
133 if isinstance(xs, (list, set, tuple)):
133 if isinstance(xs, (list, set, tuple)):
134 for x in xs:
134 for x in xs:
135 _rejectunicode(name, x)
135 _rejectunicode(name, x)
136 elif isinstance(xs, dict):
136 elif isinstance(xs, dict):
137 for k, v in xs.items():
137 for k, v in xs.items():
138 _rejectunicode(name, k)
138 _rejectunicode(name, k)
139 _rejectunicode(b'%s.%s' % (name, stringutil.forcebytestr(k)), v)
139 _rejectunicode(b'%s.%s' % (name, stringutil.forcebytestr(k)), v)
140 elif isinstance(xs, type(u'')):
140 elif isinstance(xs, type(u'')):
141 raise error.ProgrammingError(b"unicode %r found in %s" % (xs, name),
141 raise error.ProgrammingError(b"unicode %r found in %s" % (xs, name),
142 hint="use b'' to make it byte string")
142 hint="use b'' to make it byte string")
143
143
144 # attributes set by registrar.command
144 # attributes set by registrar.command
145 _cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
145 _cmdfuncattrs = ('norepo', 'optionalrepo', 'inferrepo')
146
146
147 def _validatecmdtable(ui, cmdtable):
147 def _validatecmdtable(ui, cmdtable):
148 """Check if extension commands have required attributes"""
148 """Check if extension commands have required attributes"""
149 for c, e in cmdtable.iteritems():
149 for c, e in cmdtable.iteritems():
150 f = e[0]
150 f = e[0]
151 missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
151 missing = [a for a in _cmdfuncattrs if not util.safehasattr(f, a)]
152 if not missing:
152 if not missing:
153 continue
153 continue
154 raise error.ProgrammingError(
154 raise error.ProgrammingError(
155 'missing attributes: %s' % ', '.join(missing),
155 'missing attributes: %s' % ', '.join(missing),
156 hint="use @command decorator to register '%s'" % c)
156 hint="use @command decorator to register '%s'" % c)
157
157
158 def _validatetables(ui, mod):
158 def _validatetables(ui, mod):
159 """Sanity check for loadable tables provided by extension module"""
159 """Sanity check for loadable tables provided by extension module"""
160 for t in ['cmdtable', 'colortable', 'configtable']:
160 for t in ['cmdtable', 'colortable', 'configtable']:
161 _rejectunicode(t, getattr(mod, t, {}))
161 _rejectunicode(t, getattr(mod, t, {}))
162 for t in ['filesetpredicate', 'internalmerge', 'revsetpredicate',
162 for t in ['filesetpredicate', 'internalmerge', 'revsetpredicate',
163 'templatefilter', 'templatefunc', 'templatekeyword']:
163 'templatefilter', 'templatefunc', 'templatekeyword']:
164 o = getattr(mod, t, None)
164 o = getattr(mod, t, None)
165 if o:
165 if o:
166 _rejectunicode(t, o._table)
166 _rejectunicode(t, o._table)
167 _validatecmdtable(ui, getattr(mod, 'cmdtable', {}))
167 _validatecmdtable(ui, getattr(mod, 'cmdtable', {}))
168
168
169 def load(ui, name, path):
169 def load(ui, name, path, log=lambda *a: None):
170 if name.startswith('hgext.') or name.startswith('hgext/'):
170 if name.startswith('hgext.') or name.startswith('hgext/'):
171 shortname = name[6:]
171 shortname = name[6:]
172 else:
172 else:
173 shortname = name
173 shortname = name
174 if shortname in _builtin:
174 if shortname in _builtin:
175 return None
175 return None
176 if shortname in _extensions:
176 if shortname in _extensions:
177 return _extensions[shortname]
177 return _extensions[shortname]
178 log(' - loading extension: %r\n', shortname)
178 _extensions[shortname] = None
179 _extensions[shortname] = None
179 mod = _importext(name, path, bind(_reportimporterror, ui))
180 with util.timedcm() as stats:
181 mod = _importext(name, path, bind(_reportimporterror, ui))
182 log(' > %r extension loaded in %s\n', shortname, stats)
180
183
181 # Before we do anything with the extension, check against minimum stated
184 # Before we do anything with the extension, check against minimum stated
182 # compatibility. This gives extension authors a mechanism to have their
185 # compatibility. This gives extension authors a mechanism to have their
183 # extensions short circuit when loaded with a known incompatible version
186 # extensions short circuit when loaded with a known incompatible version
184 # of Mercurial.
187 # of Mercurial.
185 minver = getattr(mod, 'minimumhgversion', None)
188 minver = getattr(mod, 'minimumhgversion', None)
186 if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
189 if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2):
187 ui.warn(_('(third party extension %s requires version %s or newer '
190 ui.warn(_('(third party extension %s requires version %s or newer '
188 'of Mercurial; disabling)\n') % (shortname, minver))
191 'of Mercurial; disabling)\n') % (shortname, minver))
189 return
192 return
193 log(' - validating extension tables: %r\n', shortname)
190 _validatetables(ui, mod)
194 _validatetables(ui, mod)
191
195
192 _extensions[shortname] = mod
196 _extensions[shortname] = mod
193 _order.append(shortname)
197 _order.append(shortname)
194 for fn in _aftercallbacks.get(shortname, []):
198 log(' - invoking registered callbacks: %r\n', shortname)
195 fn(loaded=True)
199 with util.timedcm() as stats:
200 for fn in _aftercallbacks.get(shortname, []):
201 fn(loaded=True)
202 log(' > callbacks completed in %s\n', stats)
196 return mod
203 return mod
197
204
198 def _runuisetup(name, ui):
205 def _runuisetup(name, ui):
199 uisetup = getattr(_extensions[name], 'uisetup', None)
206 uisetup = getattr(_extensions[name], 'uisetup', None)
200 if uisetup:
207 if uisetup:
201 try:
208 try:
202 uisetup(ui)
209 uisetup(ui)
203 except Exception as inst:
210 except Exception as inst:
204 ui.traceback(force=True)
211 ui.traceback(force=True)
205 msg = stringutil.forcebytestr(inst)
212 msg = stringutil.forcebytestr(inst)
206 ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
213 ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
207 return False
214 return False
208 return True
215 return True
209
216
210 def _runextsetup(name, ui):
217 def _runextsetup(name, ui):
211 extsetup = getattr(_extensions[name], 'extsetup', None)
218 extsetup = getattr(_extensions[name], 'extsetup', None)
212 if extsetup:
219 if extsetup:
213 try:
220 try:
214 try:
221 try:
215 extsetup(ui)
222 extsetup(ui)
216 except TypeError:
223 except TypeError:
217 if pycompat.getargspec(extsetup).args:
224 if pycompat.getargspec(extsetup).args:
218 raise
225 raise
219 extsetup() # old extsetup with no ui argument
226 extsetup() # old extsetup with no ui argument
220 except Exception as inst:
227 except Exception as inst:
221 ui.traceback(force=True)
228 ui.traceback(force=True)
222 msg = stringutil.forcebytestr(inst)
229 msg = stringutil.forcebytestr(inst)
223 ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
230 ui.warn(_("*** failed to set up extension %s: %s\n") % (name, msg))
224 return False
231 return False
225 return True
232 return True
226
233
227 def loadall(ui, whitelist=None):
234 def loadall(ui, whitelist=None):
235 if ui.configbool('devel', 'debug.extensions'):
236 log = lambda msg, *values: ui.debug('debug.extensions: ',
237 msg % values, label='debug.extensions')
238 else:
239 log = lambda *a, **kw: None
228 result = ui.configitems("extensions")
240 result = ui.configitems("extensions")
229 if whitelist is not None:
241 if whitelist is not None:
230 result = [(k, v) for (k, v) in result if k in whitelist]
242 result = [(k, v) for (k, v) in result if k in whitelist]
231 newindex = len(_order)
243 newindex = len(_order)
232 for (name, path) in result:
244 log('loading %sextensions\n', 'additional ' if newindex else '')
233 if path:
245 log('- processing %d entries\n', len(result))
234 if path[0:1] == '!':
246 with util.timedcm() as stats:
235 _disabledextensions[name] = path[1:]
247 for (name, path) in result:
236 continue
237 try:
238 load(ui, name, path)
239 except Exception as inst:
240 msg = stringutil.forcebytestr(inst)
241 if path:
248 if path:
242 ui.warn(_("*** failed to import extension %s from %s: %s\n")
249 if path[0:1] == '!':
243 % (name, path, msg))
250 if name not in _disabledextensions:
244 else:
251 log(' - skipping disabled extension: %r\n', name)
245 ui.warn(_("*** failed to import extension %s: %s\n")
252 _disabledextensions[name] = path[1:]
246 % (name, msg))
253 continue
247 if isinstance(inst, error.Hint) and inst.hint:
254 try:
248 ui.warn(_("*** (%s)\n") % inst.hint)
255 load(ui, name, path, log)
249 ui.traceback()
256 except Exception as inst:
257 msg = stringutil.forcebytestr(inst)
258 if path:
259 ui.warn(_("*** failed to import extension %s from %s: %s\n")
260 % (name, path, msg))
261 else:
262 ui.warn(_("*** failed to import extension %s: %s\n")
263 % (name, msg))
264 if isinstance(inst, error.Hint) and inst.hint:
265 ui.warn(_("*** (%s)\n") % inst.hint)
266 ui.traceback()
267
268 log('> loaded %d extensions, total time %s\n',
269 len(_order) - newindex, stats)
250 # list of (objname, loadermod, loadername) tuple:
270 # list of (objname, loadermod, loadername) tuple:
251 # - objname is the name of an object in extension module,
271 # - objname is the name of an object in extension module,
252 # from which extra information is loaded
272 # from which extra information is loaded
253 # - loadermod is the module where loader is placed
273 # - loadermod is the module where loader is placed
254 # - loadername is the name of the function,
274 # - loadername is the name of the function,
255 # which takes (ui, extensionname, extraobj) arguments
275 # which takes (ui, extensionname, extraobj) arguments
256 #
276 #
257 # This one is for the list of item that must be run before running any setup
277 # This one is for the list of item that must be run before running any setup
258 earlyextraloaders = [
278 earlyextraloaders = [
259 ('configtable', configitems, 'loadconfigtable'),
279 ('configtable', configitems, 'loadconfigtable'),
260 ]
280 ]
281
282 log('- loading configtable attributes\n')
261 _loadextra(ui, newindex, earlyextraloaders)
283 _loadextra(ui, newindex, earlyextraloaders)
262
284
263 broken = set()
285 broken = set()
286 log('- executing uisetup hooks\n')
264 for name in _order[newindex:]:
287 for name in _order[newindex:]:
265 if not _runuisetup(name, ui):
288 log(' - running uisetup for %r\n', name)
266 broken.add(name)
289 with util.timedcm() as stats:
290 if not _runuisetup(name, ui):
291 log(' - the %r extension uisetup failed\n', name)
292 broken.add(name)
293 log(' > uisetup for %r took %s\n', name, stats)
267
294
295 log('- executing extsetup hooks\n')
268 for name in _order[newindex:]:
296 for name in _order[newindex:]:
269 if name in broken:
297 if name in broken:
270 continue
298 continue
271 if not _runextsetup(name, ui):
299 log(' - running extsetup for %r\n', name)
272 broken.add(name)
300 with util.timedcm() as stats:
301 if not _runextsetup(name, ui):
302 log(' - the %r extension extsetup failed\n', name)
303 broken.add(name)
304 log(' > extsetup for %r took %s\n', name, stats)
273
305
274 for name in broken:
306 for name in broken:
307 log(' - disabling broken %r extension\n', name)
275 _extensions[name] = None
308 _extensions[name] = None
276
309
277 # Call aftercallbacks that were never met.
310 # Call aftercallbacks that were never met.
278 for shortname in _aftercallbacks:
311 log('- executing remaining aftercallbacks\n')
279 if shortname in _extensions:
312 with util.timedcm() as stats:
280 continue
313 for shortname in _aftercallbacks:
314 if shortname in _extensions:
315 continue
281
316
282 for fn in _aftercallbacks[shortname]:
317 for fn in _aftercallbacks[shortname]:
283 fn(loaded=False)
318 log(' - extension %r not loaded, notify callbacks\n',
319 shortname)
320 fn(loaded=False)
321 log('> remaining aftercallbacks completed in %s\n', stats)
284
322
285 # loadall() is called multiple times and lingering _aftercallbacks
323 # loadall() is called multiple times and lingering _aftercallbacks
286 # entries could result in double execution. See issue4646.
324 # entries could result in double execution. See issue4646.
287 _aftercallbacks.clear()
325 _aftercallbacks.clear()
288
326
289 # delay importing avoids cyclic dependency (especially commands)
327 # delay importing avoids cyclic dependency (especially commands)
290 from . import (
328 from . import (
291 color,
329 color,
292 commands,
330 commands,
293 filemerge,
331 filemerge,
294 fileset,
332 fileset,
295 revset,
333 revset,
296 templatefilters,
334 templatefilters,
297 templatefuncs,
335 templatefuncs,
298 templatekw,
336 templatekw,
299 )
337 )
300
338
301 # list of (objname, loadermod, loadername) tuple:
339 # list of (objname, loadermod, loadername) tuple:
302 # - objname is the name of an object in extension module,
340 # - objname is the name of an object in extension module,
303 # from which extra information is loaded
341 # from which extra information is loaded
304 # - loadermod is the module where loader is placed
342 # - loadermod is the module where loader is placed
305 # - loadername is the name of the function,
343 # - loadername is the name of the function,
306 # which takes (ui, extensionname, extraobj) arguments
344 # which takes (ui, extensionname, extraobj) arguments
345 log('- loading extension registration objects\n')
307 extraloaders = [
346 extraloaders = [
308 ('cmdtable', commands, 'loadcmdtable'),
347 ('cmdtable', commands, 'loadcmdtable'),
309 ('colortable', color, 'loadcolortable'),
348 ('colortable', color, 'loadcolortable'),
310 ('filesetpredicate', fileset, 'loadpredicate'),
349 ('filesetpredicate', fileset, 'loadpredicate'),
311 ('internalmerge', filemerge, 'loadinternalmerge'),
350 ('internalmerge', filemerge, 'loadinternalmerge'),
312 ('revsetpredicate', revset, 'loadpredicate'),
351 ('revsetpredicate', revset, 'loadpredicate'),
313 ('templatefilter', templatefilters, 'loadfilter'),
352 ('templatefilter', templatefilters, 'loadfilter'),
314 ('templatefunc', templatefuncs, 'loadfunction'),
353 ('templatefunc', templatefuncs, 'loadfunction'),
315 ('templatekeyword', templatekw, 'loadkeyword'),
354 ('templatekeyword', templatekw, 'loadkeyword'),
316 ]
355 ]
317 _loadextra(ui, newindex, extraloaders)
356 with util.timedcm() as stats:
357 _loadextra(ui, newindex, extraloaders)
358 log('> extension registration object loading took %s\n', stats)
359 log('extension loading complete\n')
318
360
319 def _loadextra(ui, newindex, extraloaders):
361 def _loadextra(ui, newindex, extraloaders):
320 for name in _order[newindex:]:
362 for name in _order[newindex:]:
321 module = _extensions[name]
363 module = _extensions[name]
322 if not module:
364 if not module:
323 continue # loading this module failed
365 continue # loading this module failed
324
366
325 for objname, loadermod, loadername in extraloaders:
367 for objname, loadermod, loadername in extraloaders:
326 extraobj = getattr(module, objname, None)
368 extraobj = getattr(module, objname, None)
327 if extraobj is not None:
369 if extraobj is not None:
328 getattr(loadermod, loadername)(ui, name, extraobj)
370 getattr(loadermod, loadername)(ui, name, extraobj)
329
371
330 def afterloaded(extension, callback):
372 def afterloaded(extension, callback):
331 '''Run the specified function after a named extension is loaded.
373 '''Run the specified function after a named extension is loaded.
332
374
333 If the named extension is already loaded, the callback will be called
375 If the named extension is already loaded, the callback will be called
334 immediately.
376 immediately.
335
377
336 If the named extension never loads, the callback will be called after
378 If the named extension never loads, the callback will be called after
337 all extensions have been loaded.
379 all extensions have been loaded.
338
380
339 The callback receives the named argument ``loaded``, which is a boolean
381 The callback receives the named argument ``loaded``, which is a boolean
340 indicating whether the dependent extension actually loaded.
382 indicating whether the dependent extension actually loaded.
341 '''
383 '''
342
384
343 if extension in _extensions:
385 if extension in _extensions:
344 # Report loaded as False if the extension is disabled
386 # Report loaded as False if the extension is disabled
345 loaded = (_extensions[extension] is not None)
387 loaded = (_extensions[extension] is not None)
346 callback(loaded=loaded)
388 callback(loaded=loaded)
347 else:
389 else:
348 _aftercallbacks.setdefault(extension, []).append(callback)
390 _aftercallbacks.setdefault(extension, []).append(callback)
349
391
350 def bind(func, *args):
392 def bind(func, *args):
351 '''Partial function application
393 '''Partial function application
352
394
353 Returns a new function that is the partial application of args and kwargs
395 Returns a new function that is the partial application of args and kwargs
354 to func. For example,
396 to func. For example,
355
397
356 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
398 f(1, 2, bar=3) === bind(f, 1)(2, bar=3)'''
357 assert callable(func)
399 assert callable(func)
358 def closure(*a, **kw):
400 def closure(*a, **kw):
359 return func(*(args + a), **kw)
401 return func(*(args + a), **kw)
360 return closure
402 return closure
361
403
362 def _updatewrapper(wrap, origfn, unboundwrapper):
404 def _updatewrapper(wrap, origfn, unboundwrapper):
363 '''Copy and add some useful attributes to wrapper'''
405 '''Copy and add some useful attributes to wrapper'''
364 try:
406 try:
365 wrap.__name__ = origfn.__name__
407 wrap.__name__ = origfn.__name__
366 except AttributeError:
408 except AttributeError:
367 pass
409 pass
368 wrap.__module__ = getattr(origfn, '__module__')
410 wrap.__module__ = getattr(origfn, '__module__')
369 wrap.__doc__ = getattr(origfn, '__doc__')
411 wrap.__doc__ = getattr(origfn, '__doc__')
370 wrap.__dict__.update(getattr(origfn, '__dict__', {}))
412 wrap.__dict__.update(getattr(origfn, '__dict__', {}))
371 wrap._origfunc = origfn
413 wrap._origfunc = origfn
372 wrap._unboundwrapper = unboundwrapper
414 wrap._unboundwrapper = unboundwrapper
373
415
374 def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
416 def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
375 '''Wrap the command named `command' in table
417 '''Wrap the command named `command' in table
376
418
377 Replace command in the command table with wrapper. The wrapped command will
419 Replace command in the command table with wrapper. The wrapped command will
378 be inserted into the command table specified by the table argument.
420 be inserted into the command table specified by the table argument.
379
421
380 The wrapper will be called like
422 The wrapper will be called like
381
423
382 wrapper(orig, *args, **kwargs)
424 wrapper(orig, *args, **kwargs)
383
425
384 where orig is the original (wrapped) function, and *args, **kwargs
426 where orig is the original (wrapped) function, and *args, **kwargs
385 are the arguments passed to it.
427 are the arguments passed to it.
386
428
387 Optionally append to the command synopsis and docstring, used for help.
429 Optionally append to the command synopsis and docstring, used for help.
388 For example, if your extension wraps the ``bookmarks`` command to add the
430 For example, if your extension wraps the ``bookmarks`` command to add the
389 flags ``--remote`` and ``--all`` you might call this function like so:
431 flags ``--remote`` and ``--all`` you might call this function like so:
390
432
391 synopsis = ' [-a] [--remote]'
433 synopsis = ' [-a] [--remote]'
392 docstring = """
434 docstring = """
393
435
394 The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
436 The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``)
395 flags to the bookmarks command. Either flag will show the remote bookmarks
437 flags to the bookmarks command. Either flag will show the remote bookmarks
396 known to the repository; ``--remote`` will also suppress the output of the
438 known to the repository; ``--remote`` will also suppress the output of the
397 local bookmarks.
439 local bookmarks.
398 """
440 """
399
441
400 extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
442 extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks,
401 synopsis, docstring)
443 synopsis, docstring)
402 '''
444 '''
403 assert callable(wrapper)
445 assert callable(wrapper)
404 aliases, entry = cmdutil.findcmd(command, table)
446 aliases, entry = cmdutil.findcmd(command, table)
405 for alias, e in table.iteritems():
447 for alias, e in table.iteritems():
406 if e is entry:
448 if e is entry:
407 key = alias
449 key = alias
408 break
450 break
409
451
410 origfn = entry[0]
452 origfn = entry[0]
411 wrap = functools.partial(util.checksignature(wrapper),
453 wrap = functools.partial(util.checksignature(wrapper),
412 util.checksignature(origfn))
454 util.checksignature(origfn))
413 _updatewrapper(wrap, origfn, wrapper)
455 _updatewrapper(wrap, origfn, wrapper)
414 if docstring is not None:
456 if docstring is not None:
415 wrap.__doc__ += docstring
457 wrap.__doc__ += docstring
416
458
417 newentry = list(entry)
459 newentry = list(entry)
418 newentry[0] = wrap
460 newentry[0] = wrap
419 if synopsis is not None:
461 if synopsis is not None:
420 newentry[2] += synopsis
462 newentry[2] += synopsis
421 table[key] = tuple(newentry)
463 table[key] = tuple(newentry)
422 return entry
464 return entry
423
465
424 def wrapfilecache(cls, propname, wrapper):
466 def wrapfilecache(cls, propname, wrapper):
425 """Wraps a filecache property.
467 """Wraps a filecache property.
426
468
427 These can't be wrapped using the normal wrapfunction.
469 These can't be wrapped using the normal wrapfunction.
428 """
470 """
429 propname = pycompat.sysstr(propname)
471 propname = pycompat.sysstr(propname)
430 assert callable(wrapper)
472 assert callable(wrapper)
431 for currcls in cls.__mro__:
473 for currcls in cls.__mro__:
432 if propname in currcls.__dict__:
474 if propname in currcls.__dict__:
433 origfn = currcls.__dict__[propname].func
475 origfn = currcls.__dict__[propname].func
434 assert callable(origfn)
476 assert callable(origfn)
435 def wrap(*args, **kwargs):
477 def wrap(*args, **kwargs):
436 return wrapper(origfn, *args, **kwargs)
478 return wrapper(origfn, *args, **kwargs)
437 currcls.__dict__[propname].func = wrap
479 currcls.__dict__[propname].func = wrap
438 break
480 break
439
481
440 if currcls is object:
482 if currcls is object:
441 raise AttributeError(r"type '%s' has no property '%s'" % (
483 raise AttributeError(r"type '%s' has no property '%s'" % (
442 cls, propname))
484 cls, propname))
443
485
444 class wrappedfunction(object):
486 class wrappedfunction(object):
445 '''context manager for temporarily wrapping a function'''
487 '''context manager for temporarily wrapping a function'''
446
488
447 def __init__(self, container, funcname, wrapper):
489 def __init__(self, container, funcname, wrapper):
448 assert callable(wrapper)
490 assert callable(wrapper)
449 self._container = container
491 self._container = container
450 self._funcname = funcname
492 self._funcname = funcname
451 self._wrapper = wrapper
493 self._wrapper = wrapper
452
494
453 def __enter__(self):
495 def __enter__(self):
454 wrapfunction(self._container, self._funcname, self._wrapper)
496 wrapfunction(self._container, self._funcname, self._wrapper)
455
497
456 def __exit__(self, exctype, excvalue, traceback):
498 def __exit__(self, exctype, excvalue, traceback):
457 unwrapfunction(self._container, self._funcname, self._wrapper)
499 unwrapfunction(self._container, self._funcname, self._wrapper)
458
500
def wrapfunction(container, funcname, wrapper):
    '''Replace container.funcname with a wrapper around the original.

    The container is typically a module, class, or instance.  The wrapper
    is invoked as

      wrapper(orig, *args, **kwargs)

    where orig is the original (wrapped) function and *args/**kwargs are
    the arguments the caller passed.  Returns the original function.

    Wrapping methods of the repository object is not recommended since it
    conflicts with extensions that extend the repository by subclassing.
    All extensions that need to extend methods of localrepository should
    use this subclassing trick instead: reposetup() should look like

      def reposetup(ui, repo):
          class myrepo(repo.__class__):
              def whatever(self, *args, **kwargs):
                  [...extension stuff...]
                  super(myrepo, self).whatever(*args, **kwargs)
                  [...extension stuff...]

          repo.__class__ = myrepo

    In general, combining wrapfunction() with subclassing does not work.
    Since you cannot control what other extensions are loaded by your end
    users, you should play nicely with others by using the subclass trick.
    '''
    assert callable(wrapper)

    origfn = getattr(container, funcname)
    assert callable(origfn)

    if inspect.ismodule(container):
        # origfn is a plain module-level function, so "partial" can be
        # used; it will not insert an extra frame in the traceback.
        wrapped = functools.partial(wrapper, origfn)
    else:
        # "partial" cannot be safely used for instance/class attributes.
        # Emulate its effect with "bind" at the cost of one more frame in
        # the traceback.
        wrapped = bind(wrapper, origfn)
    _updatewrapper(wrapped, origfn, wrapper)
    setattr(container, funcname, wrapped)
    return origfn
def unwrapfunction(container, funcname, wrapper=None):
    '''undo wrapfunction

    If wrappers is None, undo the last wrap. Otherwise removes the wrapper
    from the chain of wrappers.

    Return the removed wrapper.
    Raise IndexError if wrapper is None and nothing to unwrap; ValueError if
    wrapper is not None but is not found in the wrapper chain.
    '''
    chain = getwrapperchain(container, funcname)
    # the last element of the chain is always the original function
    origfn = chain.pop()
    if wrapper is None:
        # default to removing the most recently applied wrapper
        wrapper = chain[0]
    chain.remove(wrapper)
    # rebuild the chain from scratch: reset to the original function, then
    # re-apply the surviving wrappers oldest-first so their relative order
    # is preserved
    setattr(container, funcname, origfn)
    for w in reversed(chain):
        wrapfunction(container, funcname, w)
    return wrapper
def getwrapperchain(container, funcname):
    '''get a chain of wrappers of a function

    Return a list of functions: [newest wrapper, ..., oldest wrapper, origfunc]

    The wrapper functions are the ones passed to wrapfunction, whose first
    argument is origfunc.
    '''
    chain = []
    current = getattr(container, funcname)
    while current:
        assert callable(current)
        # _unboundwrapper is stamped onto wrapped functions by
        # wrapfunction(); the original, unwrapped callable has neither
        # attribute and terminates the walk.
        chain.append(getattr(current, '_unboundwrapper', current))
        current = getattr(current, '_origfunc', None)
    return chain
def _disabledpaths():
    '''find paths of disabled extensions. returns a dict of {name: path}'''
    import hgext
    extpath = os.path.dirname(
        os.path.abspath(pycompat.fsencode(hgext.__file__)))
    try: # might not be a filesystem path
        files = os.listdir(extpath)
    except OSError:
        return {}

    exts = {}
    for e in files:
        if e.endswith('.py'):
            # single-module extension: hgext/foo.py
            name = e.rsplit('.', 1)[0]
            path = os.path.join(extpath, e)
        else:
            # package extension: hgext/foo/__init__.py
            name = e
            path = os.path.join(extpath, e, '__init__.py')
        if not os.path.exists(path):
            continue
        # skip duplicates, extensions that are already enabled, and the
        # hgext package initializer itself
        if name in exts or name in _order or name == '__init__':
            continue
        exts[name] = path
    for name, path in _disabledextensions.iteritems():
        # If no path was provided for a disabled extension (e.g. "color=!"),
        # don't replace the path we already found by the scan above.
        if path:
            exts[name] = path
    return exts
def _moduledoc(file):
    '''return the top-level python documentation for the given file

    Loosely inspired by pydoc.source_synopsis(), but rewritten to
    handle triple quotes and to return the whole text instead of just
    the synopsis.

    Returns None when the file has no top-level docstring, or when the
    docstring's closing triple-quote is never found.'''
    result = []

    # skip leading comment lines and blank lines
    line = file.readline()
    while line[:1] == '#' or not line.strip():
        line = file.readline()
        if not line:
            break

    start = line[:3]
    if start == '"""' or start == "'''":
        line = line[3:]
        while line:
            if line.rstrip().endswith(start):
                # closing delimiter found; keep any text preceding it
                line = line.split(start)[0]
                if line:
                    result.append(line)
                break
            result.append(line)
            line = file.readline()
        else:
            # EOF reached without a closing delimiter: the docstring is
            # unterminated, so report "no documentation" rather than a
            # truncated fragment. (The old code checked "not line" inside
            # the loop body, where it could never be true, and fell
            # through to return the partial text.)
            return None
    else:
        return None

    return ''.join(result)
def _disabledhelp(path):
    '''retrieve help synopsis of a disabled extension (without importing)'''
    try:
        with open(path, 'rb') as src:
            doc = _moduledoc(src)
    except IOError:
        return

    # Localize the extracted docstring; fall back to a canned message when
    # the module carries no documentation at all.
    if not doc:
        return _('(no help text available)')
    return gettext(doc)
def disabled():
    '''find disabled extensions from hgext. returns a dict of {name: desc}'''
    # Frozen/installed Mercurial ships a pre-built index of bundled
    # extensions; prefer it to scanning the filesystem.
    try:
        from hgext import __index__
        return dict((name, gettext(desc))
                    for name, desc in __index__.docs.iteritems()
                    if name not in _order)
    except (ImportError, AttributeError):
        pass

    exts = {}
    for name, path in _disabledpaths().iteritems():
        doc = _disabledhelp(path)
        if doc:
            # keep only the synopsis (first line) of the docstring
            exts[name] = doc.splitlines()[0]
    return exts
def disabledext(name):
    '''find a specific disabled extension from hgext. returns desc'''
    # Try the pre-built index first (frozen/installed Mercurial).
    try:
        from hgext import __index__
        if name in _order: # enabled
            return
        return gettext(__index__.docs.get(name))
    except (ImportError, AttributeError):
        pass

    # Fall back to scanning hgext on disk.
    paths = _disabledpaths()
    if name in paths:
        return _disabledhelp(paths[name])
def _walkcommand(node):
    """Scan @command() decorators in the tree starting at node"""
    # Breadth-first walk of the AST.  We descend into everything except
    # function definitions: for those we only inspect the decorator list
    # (nested functions are deliberately not visited).
    queue = collections.deque([node])
    while queue:
        current = queue.popleft()
        if isinstance(current, ast.FunctionDef):
            for deco in current.decorator_list:
                if (isinstance(deco, ast.Call)
                        and isinstance(deco.func, ast.Name)
                        and deco.func.id == r'command'):
                    yield deco
        else:
            queue.extend(ast.iter_child_nodes(current))
def _disabledcmdtable(path):
    """Construct a dummy command table without loading the extension module

    This may raise IOError or SyntaxError.
    """
    with open(path, 'rb') as src:
        root = ast.parse(src.read(), path)
    cmdtable = {}
    for node in _walkcommand(root):
        if not node.args:
            continue
        a = node.args[0]
        if isinstance(a, ast.Str):
            name = pycompat.sysbytes(a.s)
        elif pycompat.ispy3 and isinstance(a, ast.Bytes):
            name = a.s
        else:
            # command name is not a literal string; cannot be stubbed
            continue
        # dummy entry: no function, no options, empty synopsis
        cmdtable[name] = (None, [], b'')
    return cmdtable
def _finddisabledcmd(ui, cmd, name, path, strict):
    # Return (cmdname, extname, doc) if the disabled extension at `path`
    # provides `cmd`, or None otherwise.
    try:
        cmdtable = _disabledcmdtable(path)
    except (IOError, SyntaxError):
        return
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
    except (error.AmbiguousCommand, error.UnknownCommand):
        return
    # prefer the alias that the user's abbreviation actually matches;
    # otherwise fall back to the command's canonical (first) name
    for c in aliases:
        if c.startswith(cmd):
            cmd = c
            break
    else:
        cmd = aliases[0]
    doc = _disabledhelp(path)
    return (cmd, name, doc)
def disabledcmd(ui, cmd, strict=False):
    '''find cmd from disabled extensions without importing.
    returns (cmdname, extname, doc)

    Raises error.UnknownCommand when no disabled extension provides cmd.'''

    paths = _disabledpaths()
    if not paths:
        raise error.UnknownCommand(cmd)

    ext = None
    # first, search for an extension with the same name as the command
    path = paths.pop(cmd, None)
    if path:
        ext = _finddisabledcmd(ui, cmd, cmd, path, strict=strict)
    if not ext:
        # otherwise, interrogate each extension until there's a match
        for name, path in paths.iteritems():
            ext = _finddisabledcmd(ui, cmd, name, path, strict=strict)
            if ext:
                break
    if ext:
        return ext

    raise error.UnknownCommand(cmd)
def enabled(shortname=True):
    '''return a dict of {name: desc} of extensions'''
    result = {}
    for name, module in extensions():
        text = gettext(module.__doc__) or _('(no help text available)')
        # optionally strip any dotted path prefix from the extension name
        key = name.split('.')[-1] if shortname else name
        # keep only the synopsis (first line) of the docstring
        result[key] = text.splitlines()[0].strip()
    return result
def notloaded():
    '''return short names of extensions that failed to load'''
    # a None entry in _extensions marks a load failure
    failed = []
    for name, mod in _extensions.iteritems():
        if mod is None:
            failed.append(name)
    return failed
def moduleversion(module):
    '''return version information from given module as a string'''
    version = ''
    # An explicit getversion() hook wins over a plain __version__ attribute.
    if (util.safehasattr(module, 'getversion')
        and callable(module.getversion)):
        version = module.getversion()
    elif util.safehasattr(module, '__version__'):
        version = module.__version__
    if isinstance(version, (list, tuple)):
        # normalize sequence versions such as (4, 6, 1) to '4.6.1'
        version = '.'.join(pycompat.bytestr(o) for o in version)
    return version
def ismoduleinternal(module):
    # Extensions bundled with Mercurial declare themselves with this
    # sentinel value in their "testedwith" attribute.
    return getattr(module, 'testedwith', None) == "ships-with-hg-core"
@@ -1,1169 +1,1177 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import
9 from __future__ import absolute_import
10
10
11 import errno
11 import errno
12 import functools
12 import hashlib
13 import hashlib
13 import os
14 import os
14 import shutil
15 import shutil
15 import stat
16 import stat
16
17
17 from .i18n import _
18 from .i18n import _
18 from .node import (
19 from .node import (
19 nullid,
20 nullid,
20 )
21 )
21
22
22 from . import (
23 from . import (
23 bookmarks,
24 bookmarks,
24 bundlerepo,
25 bundlerepo,
25 cacheutil,
26 cacheutil,
26 cmdutil,
27 cmdutil,
27 destutil,
28 destutil,
28 discovery,
29 discovery,
29 error,
30 error,
30 exchange,
31 exchange,
31 extensions,
32 extensions,
32 httppeer,
33 httppeer,
33 localrepo,
34 localrepo,
34 lock,
35 lock,
35 logcmdutil,
36 logcmdutil,
36 logexchange,
37 logexchange,
37 merge as mergemod,
38 merge as mergemod,
38 node,
39 node,
39 phases,
40 phases,
40 scmutil,
41 scmutil,
41 sshpeer,
42 sshpeer,
42 statichttprepo,
43 statichttprepo,
43 ui as uimod,
44 ui as uimod,
44 unionrepo,
45 unionrepo,
45 url,
46 url,
46 util,
47 util,
47 verify as verifymod,
48 verify as verifymod,
48 vfs as vfsmod,
49 vfs as vfsmod,
49 )
50 )
50
51
51 from .utils import (
52 from .utils import (
52 stringutil,
53 stringutil,
53 )
54 )
54
55
# Re-export lock.release so callers of this module can release locks
# without importing mercurial.lock themselves.
release = lock.release

# shared features
sharedbookmarks = 'bookmarks'
def _local(path):
    """Return the repo module for a local path: bundlerepo when the path
    is a plain file (a bundle), localrepo otherwise."""
    expanded = util.expandpath(util.urllocalpath(path))
    if os.path.isfile(expanded):
        return bundlerepo
    return localrepo
def addbranchrevs(lrepo, other, branches, revs):
    """Resolve requested branch names against a peer's branchmap.

    lrepo is the local repository (used only to resolve the '.' branch);
    other is a repo or peer to query; branches is the (hashbranch,
    branches) pair produced by parseurl(); revs is the explicit revision
    list, if any.

    Returns (revs, checkout): the augmented revision list (or None) and
    the revision to check out (or None).
    """
    peer = other.peer() # a courtesy to callers using a localrepo for other
    hashbranch, branches = branches
    if not hashbranch and not branches:
        # no branch requested: pass the explicit revs straight through
        x = revs or None
        if revs:
            y = revs[0]
        else:
            y = None
        return x, y
    if revs:
        revs = list(revs)
    else:
        revs = []

    if not peer.capable('branchmap'):
        # old servers cannot look branches up remotely; treat the URL
        # fragment as a plain revision instead
        if branches:
            raise error.Abort(_("remote branch lookup not supported"))
        revs.append(hashbranch)
        return revs, revs[0]

    with peer.commandexecutor() as e:
        branchmap = e.callcommand('branchmap', {}).result()

    def primary(branch):
        # append the heads of `branch` to revs; return False when the
        # branch is unknown to the remote
        if branch == '.':
            if not lrepo:
                raise error.Abort(_("dirstate branch not accessible"))
            branch = lrepo.dirstate.branch()
        if branch in branchmap:
            revs.extend(node.hex(r) for r in reversed(branchmap[branch]))
            return True
        else:
            return False

    for branch in branches:
        if not primary(branch):
            raise error.RepoLookupError(_("unknown branch '%s'") % branch)
    if hashbranch:
        # the fragment may name either a branch or a bare revision
        if not primary(hashbranch):
            revs.append(hashbranch)
    return revs, revs[0]
def parseurl(path, branches=None):
    '''parse url#branch, returning (url, (branch, branches))'''
    u = util.url(path)
    # a non-empty fragment names the requested branch; strip it from the
    # URL that is handed back to the caller
    branch = u.fragment or None
    if branch is not None:
        u.fragment = None
    return bytes(u), (branch, branches or [])
# Map of URL scheme -> module (or factory) providing repo/peer objects for
# that scheme; _peerlookup() falls back to 'file' for unknown schemes.
schemes = {
    'bundle': bundlerepo,
    'union': unionrepo,
    'file': _local,
    'http': httppeer,
    'https': httppeer,
    'ssh': sshpeer,
    'static-http': statichttprepo,
}
def _peerlookup(path):
    # Return the scheme handler for `path`: either the result of calling
    # the registered factory with the path, or the registered module
    # itself when it is not callable.
    u = util.url(path)
    scheme = u.scheme or 'file'
    thing = schemes.get(scheme) or schemes['file']
    try:
        return thing(path)
    except TypeError:
        # we can't test callable(thing) because 'thing' can be an unloaded
        # module that implements __call__
        if not util.safehasattr(thing, 'instance'):
            raise
        return thing
def islocal(repo):
    '''return true if repo (or path pointing to repo) is local'''
    if not isinstance(repo, bytes):
        # already a repo/peer object: ask it directly
        return repo.local()
    # a path: resolve the scheme handler and ask it, treating handlers
    # without an islocal() as remote
    try:
        return _peerlookup(repo).islocal(repo)
    except AttributeError:
        return False
def openpath(ui, path):
    '''open path with open if local, url.open if remote'''
    parsed = util.url(path, parsequery=False, parsefragment=False)
    if not parsed.islocal():
        return url.open(ui, path)
    return util.posixfile(parsed.localpath(), 'rb')
# a list of (ui, repo) functions called for wire peer initialization; each
# one is invoked by _peerorrepo() on every newly created non-local peer
wirepeersetupfuncs = []
def _peerorrepo(ui, path, create=False, presetupfuncs=None,
                intents=None):
    """return a repository object for the specified path"""
    obj = _peerlookup(path).instance(ui, path, create, intents=intents)
    # prefer the ui owned by the new object, when it has one
    ui = getattr(obj, "ui", ui)
    if ui.configbool('devel', 'debug.extensions'):
        log = functools.partial(
            ui.debug, 'debug.extensions: ', label='debug.extensions')
    else:
        # detailed extension-loading output disabled: make logging a no-op
        log = lambda *a, **kw: None
    for f in presetupfuncs or []:
        f(ui, obj)
    log('- executing reposetup hooks\n')
    for name, module in extensions.extensions(ui):
        log('  - running reposetup for %s\n' % (name,))
        hook = getattr(module, 'reposetup', None)
        if hook:
            hook(ui, obj)
    if not obj.local():
        # wire peers get an extra, dedicated setup pass
        for f in wirepeersetupfuncs:
            f(ui, obj)
    return obj
def repository(ui, path='', create=False, presetupfuncs=None, intents=None):
    """return a repository object for the specified path"""
    obj = _peerorrepo(ui, path, create, presetupfuncs=presetupfuncs,
                      intents=intents)
    repo = obj.local()
    if repo:
        # hide filtered (e.g. obsolete) changesets from callers
        return repo.filtered('visible')
    raise error.Abort(_("repository '%s' is not local") %
                      (path or obj.url()))
def peer(uiorrepo, opts, path, create=False, intents=None):
    '''return a repository peer for the specified path'''
    remote_ui = remoteui(uiorrepo, opts)
    repo_or_peer = _peerorrepo(remote_ui, path, create, intents=intents)
    return repo_or_peer.peer()
def defaultdest(source):
    '''return default destination of clone if none is given

    >>> defaultdest(b'foo')
    'foo'
    >>> defaultdest(b'/foo/bar')
    'bar'
    >>> defaultdest(b'/')
    ''
    >>> defaultdest(b'')
    ''
    >>> defaultdest(b'http://example.org/')
    ''
    >>> defaultdest(b'http://example.org/foo/')
    ''
    >>> defaultdest(b'http://example.org/foo/')
    'foo'
    '''
    # use the last meaningful path component of the URL; an empty or bare
    # root path yields an empty destination
    path = util.url(source).path
    if not path:
        return ''
    return os.path.basename(os.path.normpath(path))
def sharedreposource(repo):
    """Returns repository object for source repository of a shared repo.

    If repo is not a shared repository, returns None.
    """
    if repo.sharedpath == repo.path:
        # not a shared repository
        return None

    # return the cached source repository when one was recorded earlier
    if util.safehasattr(repo, 'srcrepo') and repo.srcrepo:
        return repo.srcrepo

    # the sharedpath always ends in the .hg; we want the path to the repo
    source = repo.vfs.split(repo.sharedpath)[0]
    srcurl, branches = parseurl(source)
    srcrepo = repository(repo.ui, srcurl)
    # cache for subsequent lookups
    repo.srcrepo = srcrepo
    return srcrepo
def share(ui, source, dest=None, update=True, bookmarks=True, defaultpath=None,
          relative=False):
    '''create a shared repository

    source may be a local path/URL or an existing repo object; dest
    defaults to defaultdest(source). When relative is True, the recorded
    sharedpath is stored relative to the new repository. Returns the new
    repository object.'''

    if not islocal(source):
        raise error.Abort(_('can only share local repositories'))

    if not dest:
        dest = defaultdest(source)
    else:
        dest = ui.expandpath(dest)

    if isinstance(source, bytes):
        # source given as a path/URL: open it and resolve any #branch
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource)
        srcrepo = repository(ui, source)
        rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
    else:
        # source is already a repo (or peer) object
        srcrepo = source.local()
        origsource = source = srcrepo.url()
        checkout = None

    sharedpath = srcrepo.sharedpath # if our source is already sharing

    destwvfs = vfsmod.vfs(dest, realpath=True)
    destvfs = vfsmod.vfs(os.path.join(destwvfs.base, '.hg'), realpath=True)

    if destvfs.lexists():
        raise error.Abort(_('destination already exists'))

    if not destwvfs.isdir():
        destwvfs.mkdir()
    destvfs.makedir()

    # propagate the source's requirements, tolerating a missing file
    requirements = ''
    try:
        requirements = srcrepo.vfs.read('requires')
    except IOError as inst:
        if inst.errno != errno.ENOENT:
            raise

    if relative:
        try:
            sharedpath = os.path.relpath(sharedpath, destvfs.base)
            requirements += 'relshared\n'
        except (IOError, ValueError) as e:
            # ValueError is raised on Windows if the drive letters differ on
            # each path
            raise error.Abort(_('cannot calculate relative path'),
                              hint=stringutil.forcebytestr(e))
    else:
        requirements += 'shared\n'

    destvfs.write('requires', requirements)
    destvfs.write('sharedpath', sharedpath)

    # open the new repo (which reads the files just written), then finish
    # initialization and optionally update the working directory
    r = repository(ui, destwvfs.base)
    postshare(srcrepo, r, bookmarks=bookmarks, defaultpath=defaultpath)
    _postshareupdate(r, update, checkout=checkout)
    return r
def unshare(ui, repo):
    """convert a shared repository to a normal one

    Copy the store data to the repo and remove the sharedpath data.
    """

    destlock = lock = None
    lock = repo.lock()
    try:
        # we use locks here because if we race with commit, we
        # can end up with extra data in the cloned revlogs that's
        # not pointed to by changesets, thus causing verify to
        # fail

        # Materialize the shared store inside this repo's own .hg;
        # copystore() returns a lock on the destination store (or None).
        destlock = copystore(ui, repo, repo.path)

        # Keep the old share pointer around as a backup instead of
        # deleting it outright.
        sharefile = repo.vfs.join('sharedpath')
        util.rename(sharefile, sharefile + '.old')

        # Drop the share-related requirements so future opens treat this
        # as a standalone repository.
        repo.requirements.discard('shared')
        repo.requirements.discard('relshared')
        repo._writerequirements()
    finally:
        # Either lock may still be None here; the short-circuit `and`
        # avoids calling release() on None.
        destlock and destlock.release()
        lock and lock.release()

    # update store, spath, svfs and sjoin of repo
    repo.unfiltered().__init__(repo.baseui, repo.root)

    # TODO: figure out how to access subrepos that exist, but were previously
    # removed from .hgsub
    c = repo['.']
    subs = c.substate
    for s in sorted(subs):
        c.sub(s).unshare()
326
334
def postshare(sourcerepo, destrepo, bookmarks=True, defaultpath=None):
    """Called after a new shared repo is created.

    The new repo only has a requirements file and pointer to the source.
    This function configures additional shared data.

    Extensions can wrap this function and write additional entries to
    destrepo/.hg/shared to indicate additional pieces of data to be shared.
    """
    # Prefer an explicitly supplied default path; otherwise inherit the
    # source repository's configured default.
    default = defaultpath or sourcerepo.ui.config('paths', 'default')
    if default:
        hgrc = util.tonativeeol('[paths]\ndefault = %s\n' % default)
        destrepo.vfs.write('hgrc', hgrc)

    with destrepo.wlock():
        if bookmarks:
            # Record that bookmarks are among the shared data.
            destrepo.vfs.write('shared', sharedbookmarks + '\n')
345
353
def _postshareupdate(repo, update, checkout=None):
    """Maybe perform a working directory update after a shared repo is created.

    ``update`` can be a boolean or a revision to update to.
    """
    if not update:
        return

    repo.ui.status(_("updating working directory\n"))
    if update is not True:
        # A concrete revision was requested; it takes precedence over the
        # ``checkout`` hint and the fallbacks below.
        checkout = update
    # Try the explicit checkout first, then 'default', then 'tip'.
    for test in (checkout, 'default', 'tip'):
        if test is None:
            continue
        try:
            uprev = repo.lookup(test)
            break
        except error.RepoLookupError:
            continue
    # NOTE(review): if every candidate raised RepoLookupError, ``uprev``
    # is unbound here and this line raises NameError. In practice 'tip'
    # resolves even in an empty repo — confirm before relying on it.
    _update(repo, uprev)
366
374
def copystore(ui, srcrepo, destpath):
    '''copy files from store of srcrepo in destpath

    returns destlock
    '''
    destlock = None
    try:
        # hardlink stays None until util.copyfiles decides whether linking
        # is possible, so the initial progress topic is always 'copying'.
        hardlink = None
        topic = _('linking') if hardlink else _('copying')
        progress = ui.makeprogress(topic)
        num = 0
        srcpublishing = srcrepo.publishing()
        srcvfs = vfsmod.vfs(srcrepo.sharedpath)
        dstvfs = vfsmod.vfs(destpath)
        for f in srcrepo.store.copylist():
            # Phase data of a publishing repo is implicit; skip copying it.
            if srcpublishing and f.endswith('phaseroots'):
                continue
            dstbase = os.path.dirname(f)
            if dstbase and not dstvfs.exists(dstbase):
                dstvfs.mkdir(dstbase)
            if srcvfs.exists(f):
                if f.endswith('data'):
                    # 'dstbase' may be empty (e.g. revlog format 0)
                    lockfile = os.path.join(dstbase, "lock")
                    # lock to avoid premature writing to the target
                    destlock = lock.lock(dstvfs, lockfile)
                hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f),
                                             hardlink, progress)
                num += n
        if hardlink:
            ui.debug("linked %d files\n" % num)
        else:
            ui.debug("copied %d files\n" % num)
        progress.complete()
        return destlock
    except: # re-raises
        # Deliberate bare except: release the destination lock on ANY
        # failure (including KeyboardInterrupt), then re-raise.
        release(destlock)
        raise
405
413
def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False,
                   rev=None, update=True, stream=False):
    """Perform a clone using a shared repo.

    The store for the repository will be located at <sharepath>/.hg. The
    specified revisions will be cloned or pulled from "source". A shared repo
    will be created at "dest" and a working copy will be created if "update" is
    True.
    """
    revs = None
    if rev:
        if not srcpeer.capable('lookup'):
            raise error.Abort(_("src repository does not support "
                                "revision lookup and so doesn't "
                                "support clone by revision"))

        # TODO this is batchable.
        # Resolve each requested revision to a node on the remote.
        remoterevs = []
        for r in rev:
            with srcpeer.commandexecutor() as e:
                remoterevs.append(e.callcommand('lookup', {
                    'key': r,
                }).result())
        revs = remoterevs

    # Obtain a lock before checking for or cloning the pooled repo otherwise
    # 2 clients may race creating or populating it.
    pooldir = os.path.dirname(sharepath)
    # lock class requires the directory to exist.
    try:
        util.makedir(pooldir, False)
    except OSError as e:
        # Another client may have created it concurrently; that's fine.
        if e.errno != errno.EEXIST:
            raise

    poolvfs = vfsmod.vfs(pooldir)
    basename = os.path.basename(sharepath)

    with lock.lock(poolvfs, '%s.lock' % basename):
        if os.path.exists(sharepath):
            ui.status(_('(sharing from existing pooled repository %s)\n') %
                      basename)
        else:
            ui.status(_('(sharing from new pooled repository %s)\n') % basename)
            # Always use pull mode because hardlinks in share mode don't work
            # well. Never update because working copies aren't necessary in
            # share mode.
            clone(ui, peeropts, source, dest=sharepath, pull=True,
                  revs=rev, update=False, stream=stream)

    # Resolve the value to put in [paths] section for the source.
    if islocal(source):
        defaultpath = os.path.abspath(util.urllocalpath(source))
    else:
        defaultpath = source

    sharerepo = repository(ui, path=sharepath)
    share(ui, sharerepo, dest=dest, update=False, bookmarks=False,
          defaultpath=defaultpath)

    # We need to perform a pull against the dest repo to fetch bookmarks
    # and other non-store data that isn't shared by default. In the case of
    # non-existing shared repo, this means we pull from the remote twice. This
    # is a bit weird. But at the time it was implemented, there wasn't an easy
    # way to pull just non-changegroup data.
    destrepo = repository(ui, path=dest)
    exchange.pull(destrepo, srcpeer, heads=revs)

    _postshareupdate(destrepo, update)

    return srcpeer, peer(ui, peeropts, dest)
477
485
# Recomputing branch cache might be slow on big repos,
# so just copy it
def _copycache(srcrepo, dstcachedir, fname):
    """copy a cache from srcrepo to destcachedir (if it exists)"""
    source = srcrepo.vfs.join('cache/%s' % fname)
    if not os.path.exists(source):
        # Nothing to copy; the source repo never built this cache.
        return
    if not os.path.exists(dstcachedir):
        os.mkdir(dstcachedir)
    util.copyfile(source, os.path.join(dstcachedir, fname))
488
496
def clone(ui, peeropts, source, dest=None, pull=False, revs=None,
          update=True, stream=False, branch=None, shareopts=None):
    """Make a copy of an existing repository.

    Create a copy of an existing repository in a new directory.  The
    source and destination are URLs, as passed to the repository
    function.  Returns a pair of repository peers, the source and
    newly created destination.

    The location of the source is added to the new repository's
    .hg/hgrc file, as the default to be used for future pulls and
    pushes.

    If an exception is raised, the partly cloned/updated destination
    repository will be deleted.

    Arguments:

    source: repository object or URL

    dest: URL of destination repository to create (defaults to base
    name of source repository)

    pull: always pull from source repository, even in local case or if the
    server prefers streaming

    stream: stream raw data uncompressed from repository (fast over
    LAN, slow over WAN)

    revs: revision to clone up to (implies pull=True)

    update: update working directory after clone completes, if
    destination is local repository (True means update to default rev,
    anything else is treated as a revision)

    branch: branches to clone

    shareopts: dict of options to control auto sharing behavior. The "pool" key
    activates auto sharing mode and defines the directory for stores. The
    "mode" key determines how to construct the directory name of the shared
    repository. "identity" means the name is derived from the node of the first
    changeset in the repository. "remote" means the name is derived from the
    remote's path/URL. Defaults to "identity."
    """

    # ``source`` may be a URL (bytes) or an already-open local repo object.
    if isinstance(source, bytes):
        origsource = ui.expandpath(source)
        source, branches = parseurl(origsource, branch)
        srcpeer = peer(ui, peeropts, source)
    else:
        srcpeer = source.peer() # in case we were called with a localrepo
        branches = (None, branch or [])
        origsource = source = srcpeer.url()
    revs, checkout = addbranchrevs(srcpeer, srcpeer, branches, revs)

    if dest is None:
        dest = defaultdest(source)
        if dest:
            ui.status(_("destination directory: %s\n") % dest)
    else:
        dest = ui.expandpath(dest)

    dest = util.urllocalpath(dest)
    source = util.urllocalpath(source)

    if not dest:
        raise error.Abort(_("empty destination path is not valid"))

    destvfs = vfsmod.vfs(dest, expandpath=True)
    if destvfs.lexists():
        if not destvfs.isdir():
            raise error.Abort(_("destination '%s' already exists") % dest)
        elif destvfs.listdir():
            raise error.Abort(_("destination '%s' is not empty") % dest)

    # Pooled-storage mode: delegate to clonewithshare() when configured.
    shareopts = shareopts or {}
    sharepool = shareopts.get('pool')
    sharenamemode = shareopts.get('mode')
    if sharepool and islocal(dest):
        sharepath = None
        if sharenamemode == 'identity':
            # Resolve the name from the initial changeset in the remote
            # repository. This returns nullid when the remote is empty. It
            # raises RepoLookupError if revision 0 is filtered or otherwise
            # not available. If we fail to resolve, sharing is not enabled.
            try:
                with srcpeer.commandexecutor() as e:
                    rootnode = e.callcommand('lookup', {
                        'key': '0',
                    }).result()

                if rootnode != node.nullid:
                    sharepath = os.path.join(sharepool, node.hex(rootnode))
                else:
                    ui.status(_('(not using pooled storage: '
                                'remote appears to be empty)\n'))
            except error.RepoLookupError:
                ui.status(_('(not using pooled storage: '
                            'unable to resolve identity of remote)\n'))
        elif sharenamemode == 'remote':
            sharepath = os.path.join(
                sharepool, node.hex(hashlib.sha1(source).digest()))
        else:
            raise error.Abort(_('unknown share naming mode: %s') %
                              sharenamemode)

        if sharepath:
            return clonewithshare(ui, peeropts, sharepath, source, srcpeer,
                                  dest, pull=pull, rev=revs, update=update,
                                  stream=stream)

    # ``cleandir`` is the directory removed in the finally block if the
    # clone fails part-way through; it is reset to None on success.
    srclock = destlock = cleandir = None
    srcrepo = srcpeer.local()
    try:
        abspath = origsource
        if islocal(origsource):
            abspath = os.path.abspath(util.urllocalpath(origsource))

        if islocal(dest):
            cleandir = dest

        # Decide whether a raw store copy (hardlink clone) is possible.
        copy = False
        if (srcrepo and srcrepo.cancopy() and islocal(dest)
            and not phases.hassecret(srcrepo)):
            copy = not pull and not revs

        if copy:
            try:
                # we use a lock here because if we race with commit, we
                # can end up with extra data in the cloned revlogs that's
                # not pointed to by changesets, thus causing verify to
                # fail
                srclock = srcrepo.lock(wait=False)
            except error.LockError:
                # Source is busy; fall back to a regular pull-based clone.
                copy = False

        if copy:
            srcrepo.hook('preoutgoing', throw=True, source='clone')
            hgdir = os.path.realpath(os.path.join(dest, ".hg"))
            if not os.path.exists(dest):
                os.mkdir(dest)
            else:
                # only clean up directories we create ourselves
                cleandir = hgdir
            try:
                destpath = hgdir
                util.makedir(destpath, notindexed=True)
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise error.Abort(_("destination '%s' already exists")
                                      % dest)
                raise

            destlock = copystore(ui, srcrepo, destpath)
            # copy bookmarks over
            srcbookmarks = srcrepo.vfs.join('bookmarks')
            dstbookmarks = os.path.join(destpath, 'bookmarks')
            if os.path.exists(srcbookmarks):
                util.copyfile(srcbookmarks, dstbookmarks)

            dstcachedir = os.path.join(destpath, 'cache')
            for cache in cacheutil.cachetocopy(srcrepo):
                _copycache(srcrepo, dstcachedir, cache)

            # we need to re-init the repo after manually copying the data
            # into it
            destpeer = peer(srcrepo, peeropts, dest)
            srcrepo.hook('outgoing', source='clone',
                         node=node.hex(node.nullid))
        else:
            try:
                destpeer = peer(srcrepo or ui, peeropts, dest, create=True)
                # only pass ui when no srcrepo
            except OSError as inst:
                if inst.errno == errno.EEXIST:
                    cleandir = None
                    raise error.Abort(_("destination '%s' already exists")
                                      % dest)
                raise

            if revs:
                if not srcpeer.capable('lookup'):
                    raise error.Abort(_("src repository does not support "
                                        "revision lookup and so doesn't "
                                        "support clone by revision"))

                # TODO this is batchable.
                remoterevs = []
                for rev in revs:
                    with srcpeer.commandexecutor() as e:
                        remoterevs.append(e.callcommand('lookup', {
                            'key': rev,
                        }).result())
                revs = remoterevs

                checkout = revs[0]
            else:
                revs = None
            local = destpeer.local()
            if local:
                u = util.url(abspath)
                defaulturl = bytes(u)
                local.ui.setconfig('paths', 'default', defaulturl, 'clone')
                if not stream:
                    if pull:
                        stream = False
                    else:
                        # None lets the server decide about streaming.
                        stream = None
                # internal config: ui.quietbookmarkmove
                overrides = {('ui', 'quietbookmarkmove'): True}
                with local.ui.configoverride(overrides, 'clone'):
                    exchange.pull(local, srcpeer, revs,
                                  streamclonerequested=stream)
            elif srcrepo:
                exchange.push(srcrepo, destpeer, revs=revs,
                              bookmarks=srcrepo._bookmarks.keys())
            else:
                raise error.Abort(_("clone from remote to remote not supported")
                                  )

        # Success: don't delete the destination in the finally block.
        cleandir = None

        destrepo = destpeer.local()
        if destrepo:
            template = uimod.samplehgrcs['cloned']
            u = util.url(abspath)
            # Never persist the password into the cloned repo's hgrc.
            u.passwd = None
            defaulturl = bytes(u)
            destrepo.vfs.write('hgrc', util.tonativeeol(template % defaulturl))
            destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone')

            if ui.configbool('experimental', 'remotenames'):
                logexchange.pullremotenames(destrepo, srcpeer)

            if update:
                if update is not True:
                    # ``update`` is a revision: resolve it on the source.
                    with srcpeer.commandexecutor() as e:
                        checkout = e.callcommand('lookup', {
                            'key': update,
                        }).result()

                uprev = None
                status = None
                if checkout is not None:
                    # Some extensions (at least hg-git and hg-subversion) have
                    # a peer.lookup() implementation that returns a name instead
                    # of a nodeid. We work around it here until we've figured
                    # out a better solution.
                    if len(checkout) == 20 and checkout in destrepo:
                        uprev = checkout
                    elif scmutil.isrevsymbol(destrepo, checkout):
                        uprev = scmutil.revsymbol(destrepo, checkout).node()
                else:
                    if update is not True:
                        try:
                            uprev = destrepo.lookup(update)
                        except error.RepoLookupError:
                            pass
                if uprev is None:
                    try:
                        # Prefer the '@' bookmark when nothing else resolved.
                        uprev = destrepo._bookmarks['@']
                        update = '@'
                        bn = destrepo[uprev].branch()
                        if bn == 'default':
                            status = _("updating to bookmark @\n")
                        else:
                            status = (_("updating to bookmark @ on branch %s\n")
                                      % bn)
                    except KeyError:
                        try:
                            uprev = destrepo.branchtip('default')
                        except error.RepoLookupError:
                            uprev = destrepo.lookup('tip')
                if not status:
                    bn = destrepo[uprev].branch()
                    status = _("updating to branch %s\n") % bn
                destrepo.ui.status(status)
                _update(destrepo, uprev)
                if update in destrepo._bookmarks:
                    bookmarks.activate(destrepo, update)
    finally:
        release(srclock, destlock)
        if cleandir is not None:
            # Best-effort cleanup of a partial clone (errors ignored).
            shutil.rmtree(cleandir, True)
        if srcpeer is not None:
            srcpeer.close()
    return srcpeer, destpeer
777
785
778 def _showstats(repo, stats, quietempty=False):
786 def _showstats(repo, stats, quietempty=False):
779 if quietempty and stats.isempty():
787 if quietempty and stats.isempty():
780 return
788 return
781 repo.ui.status(_("%d files updated, %d files merged, "
789 repo.ui.status(_("%d files updated, %d files merged, "
782 "%d files removed, %d files unresolved\n") % (
790 "%d files removed, %d files unresolved\n") % (
783 stats.updatedcount, stats.mergedcount,
791 stats.updatedcount, stats.mergedcount,
784 stats.removedcount, stats.unresolvedcount))
792 stats.removedcount, stats.unresolvedcount))
785
793
786 def updaterepo(repo, node, overwrite, updatecheck=None):
794 def updaterepo(repo, node, overwrite, updatecheck=None):
787 """Update the working directory to node.
795 """Update the working directory to node.
788
796
789 When overwrite is set, changes are clobbered, merged else
797 When overwrite is set, changes are clobbered, merged else
790
798
791 returns stats (see pydoc mercurial.merge.applyupdates)"""
799 returns stats (see pydoc mercurial.merge.applyupdates)"""
792 return mergemod.update(repo, node, False, overwrite,
800 return mergemod.update(repo, node, False, overwrite,
793 labels=['working copy', 'destination'],
801 labels=['working copy', 'destination'],
794 updatecheck=updatecheck)
802 updatecheck=updatecheck)
795
803
796 def update(repo, node, quietempty=False, updatecheck=None):
804 def update(repo, node, quietempty=False, updatecheck=None):
797 """update the working directory to node"""
805 """update the working directory to node"""
798 stats = updaterepo(repo, node, False, updatecheck=updatecheck)
806 stats = updaterepo(repo, node, False, updatecheck=updatecheck)
799 _showstats(repo, stats, quietempty)
807 _showstats(repo, stats, quietempty)
800 if stats.unresolvedcount:
808 if stats.unresolvedcount:
801 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
809 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n"))
802 return stats.unresolvedcount > 0
810 return stats.unresolvedcount > 0
803
811
804 # naming conflict in clone()
812 # naming conflict in clone()
805 _update = update
813 _update = update
806
814
807 def clean(repo, node, show_stats=True, quietempty=False):
815 def clean(repo, node, show_stats=True, quietempty=False):
808 """forcibly switch the working directory to node, clobbering changes"""
816 """forcibly switch the working directory to node, clobbering changes"""
809 stats = updaterepo(repo, node, True)
817 stats = updaterepo(repo, node, True)
810 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
818 repo.vfs.unlinkpath('graftstate', ignoremissing=True)
811 if show_stats:
819 if show_stats:
812 _showstats(repo, stats, quietempty)
820 _showstats(repo, stats, quietempty)
813 return stats.unresolvedcount > 0
821 return stats.unresolvedcount > 0
814
822
815 # naming conflict in updatetotally()
823 # naming conflict in updatetotally()
816 _clean = clean
824 _clean = clean
817
825
818 def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
826 def updatetotally(ui, repo, checkout, brev, clean=False, updatecheck=None):
819 """Update the working directory with extra care for non-file components
827 """Update the working directory with extra care for non-file components
820
828
821 This takes care of non-file components below:
829 This takes care of non-file components below:
822
830
823 :bookmark: might be advanced or (in)activated
831 :bookmark: might be advanced or (in)activated
824
832
825 This takes arguments below:
833 This takes arguments below:
826
834
827 :checkout: to which revision the working directory is updated
835 :checkout: to which revision the working directory is updated
828 :brev: a name, which might be a bookmark to be activated after updating
836 :brev: a name, which might be a bookmark to be activated after updating
829 :clean: whether changes in the working directory can be discarded
837 :clean: whether changes in the working directory can be discarded
830 :updatecheck: how to deal with a dirty working directory
838 :updatecheck: how to deal with a dirty working directory
831
839
832 Valid values for updatecheck are (None => linear):
840 Valid values for updatecheck are (None => linear):
833
841
834 * abort: abort if the working directory is dirty
842 * abort: abort if the working directory is dirty
835 * none: don't check (merge working directory changes into destination)
843 * none: don't check (merge working directory changes into destination)
836 * linear: check that update is linear before merging working directory
844 * linear: check that update is linear before merging working directory
837 changes into destination
845 changes into destination
838 * noconflict: check that the update does not result in file merges
846 * noconflict: check that the update does not result in file merges
839
847
840 This returns whether conflict is detected at updating or not.
848 This returns whether conflict is detected at updating or not.
841 """
849 """
842 if updatecheck is None:
850 if updatecheck is None:
843 updatecheck = ui.config('commands', 'update.check')
851 updatecheck = ui.config('commands', 'update.check')
844 if updatecheck not in ('abort', 'none', 'linear', 'noconflict'):
852 if updatecheck not in ('abort', 'none', 'linear', 'noconflict'):
845 # If not configured, or invalid value configured
853 # If not configured, or invalid value configured
846 updatecheck = 'linear'
854 updatecheck = 'linear'
847 with repo.wlock():
855 with repo.wlock():
848 movemarkfrom = None
856 movemarkfrom = None
849 warndest = False
857 warndest = False
850 if checkout is None:
858 if checkout is None:
851 updata = destutil.destupdate(repo, clean=clean)
859 updata = destutil.destupdate(repo, clean=clean)
852 checkout, movemarkfrom, brev = updata
860 checkout, movemarkfrom, brev = updata
853 warndest = True
861 warndest = True
854
862
855 if clean:
863 if clean:
856 ret = _clean(repo, checkout)
864 ret = _clean(repo, checkout)
857 else:
865 else:
858 if updatecheck == 'abort':
866 if updatecheck == 'abort':
859 cmdutil.bailifchanged(repo, merge=False)
867 cmdutil.bailifchanged(repo, merge=False)
860 updatecheck = 'none'
868 updatecheck = 'none'
861 ret = _update(repo, checkout, updatecheck=updatecheck)
869 ret = _update(repo, checkout, updatecheck=updatecheck)
862
870
863 if not ret and movemarkfrom:
871 if not ret and movemarkfrom:
864 if movemarkfrom == repo['.'].node():
872 if movemarkfrom == repo['.'].node():
865 pass # no-op update
873 pass # no-op update
866 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
874 elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
867 b = ui.label(repo._activebookmark, 'bookmarks.active')
875 b = ui.label(repo._activebookmark, 'bookmarks.active')
868 ui.status(_("updating bookmark %s\n") % b)
876 ui.status(_("updating bookmark %s\n") % b)
869 else:
877 else:
870 # this can happen with a non-linear update
878 # this can happen with a non-linear update
871 b = ui.label(repo._activebookmark, 'bookmarks')
879 b = ui.label(repo._activebookmark, 'bookmarks')
872 ui.status(_("(leaving bookmark %s)\n") % b)
880 ui.status(_("(leaving bookmark %s)\n") % b)
873 bookmarks.deactivate(repo)
881 bookmarks.deactivate(repo)
874 elif brev in repo._bookmarks:
882 elif brev in repo._bookmarks:
875 if brev != repo._activebookmark:
883 if brev != repo._activebookmark:
876 b = ui.label(brev, 'bookmarks.active')
884 b = ui.label(brev, 'bookmarks.active')
877 ui.status(_("(activating bookmark %s)\n") % b)
885 ui.status(_("(activating bookmark %s)\n") % b)
878 bookmarks.activate(repo, brev)
886 bookmarks.activate(repo, brev)
879 elif brev:
887 elif brev:
880 if repo._activebookmark:
888 if repo._activebookmark:
881 b = ui.label(repo._activebookmark, 'bookmarks')
889 b = ui.label(repo._activebookmark, 'bookmarks')
882 ui.status(_("(leaving bookmark %s)\n") % b)
890 ui.status(_("(leaving bookmark %s)\n") % b)
883 bookmarks.deactivate(repo)
891 bookmarks.deactivate(repo)
884
892
885 if warndest:
893 if warndest:
886 destutil.statusotherdests(ui, repo)
894 destutil.statusotherdests(ui, repo)
887
895
888 return ret
896 return ret
889
897
890 def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None,
898 def merge(repo, node, force=None, remind=True, mergeforce=False, labels=None,
891 abort=False):
899 abort=False):
892 """Branch merge with node, resolving changes. Return true if any
900 """Branch merge with node, resolving changes. Return true if any
893 unresolved conflicts."""
901 unresolved conflicts."""
894 if not abort:
902 if not abort:
895 stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce,
903 stats = mergemod.update(repo, node, True, force, mergeforce=mergeforce,
896 labels=labels)
904 labels=labels)
897 else:
905 else:
898 ms = mergemod.mergestate.read(repo)
906 ms = mergemod.mergestate.read(repo)
899 if ms.active():
907 if ms.active():
900 # there were conflicts
908 # there were conflicts
901 node = ms.localctx.hex()
909 node = ms.localctx.hex()
902 else:
910 else:
903 # there were no conficts, mergestate was not stored
911 # there were no conficts, mergestate was not stored
904 node = repo['.'].hex()
912 node = repo['.'].hex()
905
913
906 repo.ui.status(_("aborting the merge, updating back to"
914 repo.ui.status(_("aborting the merge, updating back to"
907 " %s\n") % node[:12])
915 " %s\n") % node[:12])
908 stats = mergemod.update(repo, node, branchmerge=False, force=True,
916 stats = mergemod.update(repo, node, branchmerge=False, force=True,
909 labels=labels)
917 labels=labels)
910
918
911 _showstats(repo, stats)
919 _showstats(repo, stats)
912 if stats.unresolvedcount:
920 if stats.unresolvedcount:
913 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
921 repo.ui.status(_("use 'hg resolve' to retry unresolved file merges "
914 "or 'hg merge --abort' to abandon\n"))
922 "or 'hg merge --abort' to abandon\n"))
915 elif remind and not abort:
923 elif remind and not abort:
916 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
924 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
917 return stats.unresolvedcount > 0
925 return stats.unresolvedcount > 0
918
926
919 def _incoming(displaychlist, subreporecurse, ui, repo, source,
927 def _incoming(displaychlist, subreporecurse, ui, repo, source,
920 opts, buffered=False):
928 opts, buffered=False):
921 """
929 """
922 Helper for incoming / gincoming.
930 Helper for incoming / gincoming.
923 displaychlist gets called with
931 displaychlist gets called with
924 (remoterepo, incomingchangesetlist, displayer) parameters,
932 (remoterepo, incomingchangesetlist, displayer) parameters,
925 and is supposed to contain only code that can't be unified.
933 and is supposed to contain only code that can't be unified.
926 """
934 """
927 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
935 source, branches = parseurl(ui.expandpath(source), opts.get('branch'))
928 other = peer(repo, opts, source)
936 other = peer(repo, opts, source)
929 ui.status(_('comparing with %s\n') % util.hidepassword(source))
937 ui.status(_('comparing with %s\n') % util.hidepassword(source))
930 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
938 revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev'))
931
939
932 if revs:
940 if revs:
933 revs = [other.lookup(rev) for rev in revs]
941 revs = [other.lookup(rev) for rev in revs]
934 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
942 other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other,
935 revs, opts["bundle"], opts["force"])
943 revs, opts["bundle"], opts["force"])
936 try:
944 try:
937 if not chlist:
945 if not chlist:
938 ui.status(_("no changes found\n"))
946 ui.status(_("no changes found\n"))
939 return subreporecurse()
947 return subreporecurse()
940 ui.pager('incoming')
948 ui.pager('incoming')
941 displayer = logcmdutil.changesetdisplayer(ui, other, opts,
949 displayer = logcmdutil.changesetdisplayer(ui, other, opts,
942 buffered=buffered)
950 buffered=buffered)
943 displaychlist(other, chlist, displayer)
951 displaychlist(other, chlist, displayer)
944 displayer.close()
952 displayer.close()
945 finally:
953 finally:
946 cleanupfn()
954 cleanupfn()
947 subreporecurse()
955 subreporecurse()
948 return 0 # exit code is zero since we found incoming changes
956 return 0 # exit code is zero since we found incoming changes
949
957
950 def incoming(ui, repo, source, opts):
958 def incoming(ui, repo, source, opts):
951 def subreporecurse():
959 def subreporecurse():
952 ret = 1
960 ret = 1
953 if opts.get('subrepos'):
961 if opts.get('subrepos'):
954 ctx = repo[None]
962 ctx = repo[None]
955 for subpath in sorted(ctx.substate):
963 for subpath in sorted(ctx.substate):
956 sub = ctx.sub(subpath)
964 sub = ctx.sub(subpath)
957 ret = min(ret, sub.incoming(ui, source, opts))
965 ret = min(ret, sub.incoming(ui, source, opts))
958 return ret
966 return ret
959
967
960 def display(other, chlist, displayer):
968 def display(other, chlist, displayer):
961 limit = logcmdutil.getlimit(opts)
969 limit = logcmdutil.getlimit(opts)
962 if opts.get('newest_first'):
970 if opts.get('newest_first'):
963 chlist.reverse()
971 chlist.reverse()
964 count = 0
972 count = 0
965 for n in chlist:
973 for n in chlist:
966 if limit is not None and count >= limit:
974 if limit is not None and count >= limit:
967 break
975 break
968 parents = [p for p in other.changelog.parents(n) if p != nullid]
976 parents = [p for p in other.changelog.parents(n) if p != nullid]
969 if opts.get('no_merges') and len(parents) == 2:
977 if opts.get('no_merges') and len(parents) == 2:
970 continue
978 continue
971 count += 1
979 count += 1
972 displayer.show(other[n])
980 displayer.show(other[n])
973 return _incoming(display, subreporecurse, ui, repo, source, opts)
981 return _incoming(display, subreporecurse, ui, repo, source, opts)
974
982
975 def _outgoing(ui, repo, dest, opts):
983 def _outgoing(ui, repo, dest, opts):
976 path = ui.paths.getpath(dest, default=('default-push', 'default'))
984 path = ui.paths.getpath(dest, default=('default-push', 'default'))
977 if not path:
985 if not path:
978 raise error.Abort(_('default repository not configured!'),
986 raise error.Abort(_('default repository not configured!'),
979 hint=_("see 'hg help config.paths'"))
987 hint=_("see 'hg help config.paths'"))
980 dest = path.pushloc or path.loc
988 dest = path.pushloc or path.loc
981 branches = path.branch, opts.get('branch') or []
989 branches = path.branch, opts.get('branch') or []
982
990
983 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
991 ui.status(_('comparing with %s\n') % util.hidepassword(dest))
984 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
992 revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
985 if revs:
993 if revs:
986 revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
994 revs = [repo[rev].node() for rev in scmutil.revrange(repo, revs)]
987
995
988 other = peer(repo, opts, dest)
996 other = peer(repo, opts, dest)
989 outgoing = discovery.findcommonoutgoing(repo, other, revs,
997 outgoing = discovery.findcommonoutgoing(repo, other, revs,
990 force=opts.get('force'))
998 force=opts.get('force'))
991 o = outgoing.missing
999 o = outgoing.missing
992 if not o:
1000 if not o:
993 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
1001 scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
994 return o, other
1002 return o, other
995
1003
996 def outgoing(ui, repo, dest, opts):
1004 def outgoing(ui, repo, dest, opts):
997 def recurse():
1005 def recurse():
998 ret = 1
1006 ret = 1
999 if opts.get('subrepos'):
1007 if opts.get('subrepos'):
1000 ctx = repo[None]
1008 ctx = repo[None]
1001 for subpath in sorted(ctx.substate):
1009 for subpath in sorted(ctx.substate):
1002 sub = ctx.sub(subpath)
1010 sub = ctx.sub(subpath)
1003 ret = min(ret, sub.outgoing(ui, dest, opts))
1011 ret = min(ret, sub.outgoing(ui, dest, opts))
1004 return ret
1012 return ret
1005
1013
1006 limit = logcmdutil.getlimit(opts)
1014 limit = logcmdutil.getlimit(opts)
1007 o, other = _outgoing(ui, repo, dest, opts)
1015 o, other = _outgoing(ui, repo, dest, opts)
1008 if not o:
1016 if not o:
1009 cmdutil.outgoinghooks(ui, repo, other, opts, o)
1017 cmdutil.outgoinghooks(ui, repo, other, opts, o)
1010 return recurse()
1018 return recurse()
1011
1019
1012 if opts.get('newest_first'):
1020 if opts.get('newest_first'):
1013 o.reverse()
1021 o.reverse()
1014 ui.pager('outgoing')
1022 ui.pager('outgoing')
1015 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
1023 displayer = logcmdutil.changesetdisplayer(ui, repo, opts)
1016 count = 0
1024 count = 0
1017 for n in o:
1025 for n in o:
1018 if limit is not None and count >= limit:
1026 if limit is not None and count >= limit:
1019 break
1027 break
1020 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1028 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1021 if opts.get('no_merges') and len(parents) == 2:
1029 if opts.get('no_merges') and len(parents) == 2:
1022 continue
1030 continue
1023 count += 1
1031 count += 1
1024 displayer.show(repo[n])
1032 displayer.show(repo[n])
1025 displayer.close()
1033 displayer.close()
1026 cmdutil.outgoinghooks(ui, repo, other, opts, o)
1034 cmdutil.outgoinghooks(ui, repo, other, opts, o)
1027 recurse()
1035 recurse()
1028 return 0 # exit code is zero since we found outgoing changes
1036 return 0 # exit code is zero since we found outgoing changes
1029
1037
1030 def verify(repo):
1038 def verify(repo):
1031 """verify the consistency of a repository"""
1039 """verify the consistency of a repository"""
1032 ret = verifymod.verify(repo)
1040 ret = verifymod.verify(repo)
1033
1041
1034 # Broken subrepo references in hidden csets don't seem worth worrying about,
1042 # Broken subrepo references in hidden csets don't seem worth worrying about,
1035 # since they can't be pushed/pulled, and --hidden can be used if they are a
1043 # since they can't be pushed/pulled, and --hidden can be used if they are a
1036 # concern.
1044 # concern.
1037
1045
1038 # pathto() is needed for -R case
1046 # pathto() is needed for -R case
1039 revs = repo.revs("filelog(%s)",
1047 revs = repo.revs("filelog(%s)",
1040 util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
1048 util.pathto(repo.root, repo.getcwd(), '.hgsubstate'))
1041
1049
1042 if revs:
1050 if revs:
1043 repo.ui.status(_('checking subrepo links\n'))
1051 repo.ui.status(_('checking subrepo links\n'))
1044 for rev in revs:
1052 for rev in revs:
1045 ctx = repo[rev]
1053 ctx = repo[rev]
1046 try:
1054 try:
1047 for subpath in ctx.substate:
1055 for subpath in ctx.substate:
1048 try:
1056 try:
1049 ret = (ctx.sub(subpath, allowcreate=False).verify()
1057 ret = (ctx.sub(subpath, allowcreate=False).verify()
1050 or ret)
1058 or ret)
1051 except error.RepoError as e:
1059 except error.RepoError as e:
1052 repo.ui.warn(('%d: %s\n') % (rev, e))
1060 repo.ui.warn(('%d: %s\n') % (rev, e))
1053 except Exception:
1061 except Exception:
1054 repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
1062 repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') %
1055 node.short(ctx.node()))
1063 node.short(ctx.node()))
1056
1064
1057 return ret
1065 return ret
1058
1066
1059 def remoteui(src, opts):
1067 def remoteui(src, opts):
1060 'build a remote ui from ui or repo and opts'
1068 'build a remote ui from ui or repo and opts'
1061 if util.safehasattr(src, 'baseui'): # looks like a repository
1069 if util.safehasattr(src, 'baseui'): # looks like a repository
1062 dst = src.baseui.copy() # drop repo-specific config
1070 dst = src.baseui.copy() # drop repo-specific config
1063 src = src.ui # copy target options from repo
1071 src = src.ui # copy target options from repo
1064 else: # assume it's a global ui object
1072 else: # assume it's a global ui object
1065 dst = src.copy() # keep all global options
1073 dst = src.copy() # keep all global options
1066
1074
1067 # copy ssh-specific options
1075 # copy ssh-specific options
1068 for o in 'ssh', 'remotecmd':
1076 for o in 'ssh', 'remotecmd':
1069 v = opts.get(o) or src.config('ui', o)
1077 v = opts.get(o) or src.config('ui', o)
1070 if v:
1078 if v:
1071 dst.setconfig("ui", o, v, 'copied')
1079 dst.setconfig("ui", o, v, 'copied')
1072
1080
1073 # copy bundle-specific options
1081 # copy bundle-specific options
1074 r = src.config('bundle', 'mainreporoot')
1082 r = src.config('bundle', 'mainreporoot')
1075 if r:
1083 if r:
1076 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
1084 dst.setconfig('bundle', 'mainreporoot', r, 'copied')
1077
1085
1078 # copy selected local settings to the remote ui
1086 # copy selected local settings to the remote ui
1079 for sect in ('auth', 'hostfingerprints', 'hostsecurity', 'http_proxy'):
1087 for sect in ('auth', 'hostfingerprints', 'hostsecurity', 'http_proxy'):
1080 for key, val in src.configitems(sect):
1088 for key, val in src.configitems(sect):
1081 dst.setconfig(sect, key, val, 'copied')
1089 dst.setconfig(sect, key, val, 'copied')
1082 v = src.config('web', 'cacerts')
1090 v = src.config('web', 'cacerts')
1083 if v:
1091 if v:
1084 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
1092 dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied')
1085
1093
1086 return dst
1094 return dst
1087
1095
1088 # Files of interest
1096 # Files of interest
1089 # Used to check if the repository has changed looking at mtime and size of
1097 # Used to check if the repository has changed looking at mtime and size of
1090 # these files.
1098 # these files.
1091 foi = [('spath', '00changelog.i'),
1099 foi = [('spath', '00changelog.i'),
1092 ('spath', 'phaseroots'), # ! phase can change content at the same size
1100 ('spath', 'phaseroots'), # ! phase can change content at the same size
1093 ('spath', 'obsstore'),
1101 ('spath', 'obsstore'),
1094 ('path', 'bookmarks'), # ! bookmark can change content at the same size
1102 ('path', 'bookmarks'), # ! bookmark can change content at the same size
1095 ]
1103 ]
1096
1104
1097 class cachedlocalrepo(object):
1105 class cachedlocalrepo(object):
1098 """Holds a localrepository that can be cached and reused."""
1106 """Holds a localrepository that can be cached and reused."""
1099
1107
1100 def __init__(self, repo):
1108 def __init__(self, repo):
1101 """Create a new cached repo from an existing repo.
1109 """Create a new cached repo from an existing repo.
1102
1110
1103 We assume the passed in repo was recently created. If the
1111 We assume the passed in repo was recently created. If the
1104 repo has changed between when it was created and when it was
1112 repo has changed between when it was created and when it was
1105 turned into a cache, it may not refresh properly.
1113 turned into a cache, it may not refresh properly.
1106 """
1114 """
1107 assert isinstance(repo, localrepo.localrepository)
1115 assert isinstance(repo, localrepo.localrepository)
1108 self._repo = repo
1116 self._repo = repo
1109 self._state, self.mtime = self._repostate()
1117 self._state, self.mtime = self._repostate()
1110 self._filtername = repo.filtername
1118 self._filtername = repo.filtername
1111
1119
1112 def fetch(self):
1120 def fetch(self):
1113 """Refresh (if necessary) and return a repository.
1121 """Refresh (if necessary) and return a repository.
1114
1122
1115 If the cached instance is out of date, it will be recreated
1123 If the cached instance is out of date, it will be recreated
1116 automatically and returned.
1124 automatically and returned.
1117
1125
1118 Returns a tuple of the repo and a boolean indicating whether a new
1126 Returns a tuple of the repo and a boolean indicating whether a new
1119 repo instance was created.
1127 repo instance was created.
1120 """
1128 """
1121 # We compare the mtimes and sizes of some well-known files to
1129 # We compare the mtimes and sizes of some well-known files to
1122 # determine if the repo changed. This is not precise, as mtimes
1130 # determine if the repo changed. This is not precise, as mtimes
1123 # are susceptible to clock skew and imprecise filesystems and
1131 # are susceptible to clock skew and imprecise filesystems and
1124 # file content can change while maintaining the same size.
1132 # file content can change while maintaining the same size.
1125
1133
1126 state, mtime = self._repostate()
1134 state, mtime = self._repostate()
1127 if state == self._state:
1135 if state == self._state:
1128 return self._repo, False
1136 return self._repo, False
1129
1137
1130 repo = repository(self._repo.baseui, self._repo.url())
1138 repo = repository(self._repo.baseui, self._repo.url())
1131 if self._filtername:
1139 if self._filtername:
1132 self._repo = repo.filtered(self._filtername)
1140 self._repo = repo.filtered(self._filtername)
1133 else:
1141 else:
1134 self._repo = repo.unfiltered()
1142 self._repo = repo.unfiltered()
1135 self._state = state
1143 self._state = state
1136 self.mtime = mtime
1144 self.mtime = mtime
1137
1145
1138 return self._repo, True
1146 return self._repo, True
1139
1147
1140 def _repostate(self):
1148 def _repostate(self):
1141 state = []
1149 state = []
1142 maxmtime = -1
1150 maxmtime = -1
1143 for attr, fname in foi:
1151 for attr, fname in foi:
1144 prefix = getattr(self._repo, attr)
1152 prefix = getattr(self._repo, attr)
1145 p = os.path.join(prefix, fname)
1153 p = os.path.join(prefix, fname)
1146 try:
1154 try:
1147 st = os.stat(p)
1155 st = os.stat(p)
1148 except OSError:
1156 except OSError:
1149 st = os.stat(prefix)
1157 st = os.stat(prefix)
1150 state.append((st[stat.ST_MTIME], st.st_size))
1158 state.append((st[stat.ST_MTIME], st.st_size))
1151 maxmtime = max(maxmtime, st[stat.ST_MTIME])
1159 maxmtime = max(maxmtime, st[stat.ST_MTIME])
1152
1160
1153 return tuple(state), maxmtime
1161 return tuple(state), maxmtime
1154
1162
1155 def copy(self):
1163 def copy(self):
1156 """Obtain a copy of this class instance.
1164 """Obtain a copy of this class instance.
1157
1165
1158 A new localrepository instance is obtained. The new instance should be
1166 A new localrepository instance is obtained. The new instance should be
1159 completely independent of the original.
1167 completely independent of the original.
1160 """
1168 """
1161 repo = repository(self._repo.baseui, self._repo.origroot)
1169 repo = repository(self._repo.baseui, self._repo.origroot)
1162 if self._filtername:
1170 if self._filtername:
1163 repo = repo.filtered(self._filtername)
1171 repo = repo.filtered(self._filtername)
1164 else:
1172 else:
1165 repo = repo.unfiltered()
1173 repo = repo.unfiltered()
1166 c = cachedlocalrepo(repo)
1174 c = cachedlocalrepo(repo)
1167 c._state = self._state
1175 c._state = self._state
1168 c.mtime = self.mtime
1176 c.mtime = self.mtime
1169 return c
1177 return c
@@ -1,100 +1,131 b''
1 ensure that failing ui.atexit handlers report sensibly
1 ensure that failing ui.atexit handlers report sensibly
2
2
3 $ cat > $TESTTMP/bailatexit.py <<EOF
3 $ cat > $TESTTMP/bailatexit.py <<EOF
4 > from mercurial import util
4 > from mercurial import util
5 > def bail():
5 > def bail():
6 > raise RuntimeError('ui.atexit handler exception')
6 > raise RuntimeError('ui.atexit handler exception')
7 >
7 >
8 > def extsetup(ui):
8 > def extsetup(ui):
9 > ui.atexit(bail)
9 > ui.atexit(bail)
10 > EOF
10 > EOF
11 $ hg -q --config extensions.bailatexit=$TESTTMP/bailatexit.py \
11 $ hg -q --config extensions.bailatexit=$TESTTMP/bailatexit.py \
12 > help help
12 > help help
13 hg help [-ecks] [TOPIC]
13 hg help [-ecks] [TOPIC]
14
14
15 show help for a given topic or a help overview
15 show help for a given topic or a help overview
16 error in exit handlers:
16 error in exit handlers:
17 Traceback (most recent call last):
17 Traceback (most recent call last):
18 File "*/mercurial/dispatch.py", line *, in _runexithandlers (glob)
18 File "*/mercurial/dispatch.py", line *, in _runexithandlers (glob)
19 func(*args, **kwargs)
19 func(*args, **kwargs)
20 File "$TESTTMP/bailatexit.py", line *, in bail (glob)
20 File "$TESTTMP/bailatexit.py", line *, in bail (glob)
21 raise RuntimeError('ui.atexit handler exception')
21 raise RuntimeError('ui.atexit handler exception')
22 RuntimeError: ui.atexit handler exception
22 RuntimeError: ui.atexit handler exception
23 [255]
23 [255]
24
24
25 $ rm $TESTTMP/bailatexit.py
25 $ rm $TESTTMP/bailatexit.py
26
26
27 another bad extension
27 another bad extension
28
28
29 $ echo 'raise Exception("bit bucket overflow")' > badext.py
29 $ echo 'raise Exception("bit bucket overflow")' > badext.py
30 $ abspathexc=`pwd`/badext.py
30 $ abspathexc=`pwd`/badext.py
31
31
32 $ cat >baddocext.py <<EOF
32 $ cat >baddocext.py <<EOF
33 > """
33 > """
34 > baddocext is bad
34 > baddocext is bad
35 > """
35 > """
36 > EOF
36 > EOF
37 $ abspathdoc=`pwd`/baddocext.py
37 $ abspathdoc=`pwd`/baddocext.py
38
38
39 $ cat <<EOF >> $HGRCPATH
39 $ cat <<EOF >> $HGRCPATH
40 > [extensions]
40 > [extensions]
41 > gpg =
41 > gpg =
42 > hgext.gpg =
42 > hgext.gpg =
43 > badext = $abspathexc
43 > badext = $abspathexc
44 > baddocext = $abspathdoc
44 > baddocext = $abspathdoc
45 > badext2 =
45 > badext2 =
46 > EOF
46 > EOF
47
47
48 $ hg -q help help 2>&1 |grep extension
48 $ hg -q help help 2>&1 |grep extension
49 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
49 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
50 *** failed to import extension badext2: No module named badext2
50 *** failed to import extension badext2: No module named badext2
51
51
52 show traceback
52 show traceback
53
53
54 $ hg -q help help --traceback 2>&1 | egrep ' extension|^Exception|Traceback|ImportError'
54 $ hg -q help help --traceback 2>&1 | egrep ' extension|^Exception|Traceback|ImportError'
55 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
55 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
56 Traceback (most recent call last):
56 Traceback (most recent call last):
57 Exception: bit bucket overflow
57 Exception: bit bucket overflow
58 *** failed to import extension badext2: No module named badext2
58 *** failed to import extension badext2: No module named badext2
59 Traceback (most recent call last):
59 Traceback (most recent call last):
60 ImportError: No module named badext2
60 ImportError: No module named badext2
61
61
62 names of extensions failed to load can be accessed via extensions.notloaded()
62 names of extensions failed to load can be accessed via extensions.notloaded()
63
63
64 $ cat <<EOF > showbadexts.py
64 $ cat <<EOF > showbadexts.py
65 > from mercurial import commands, extensions, registrar
65 > from mercurial import commands, extensions, registrar
66 > cmdtable = {}
66 > cmdtable = {}
67 > command = registrar.command(cmdtable)
67 > command = registrar.command(cmdtable)
68 > @command(b'showbadexts', norepo=True)
68 > @command(b'showbadexts', norepo=True)
69 > def showbadexts(ui, *pats, **opts):
69 > def showbadexts(ui, *pats, **opts):
70 > ui.write('BADEXTS: %s\n' % ' '.join(sorted(extensions.notloaded())))
70 > ui.write('BADEXTS: %s\n' % ' '.join(sorted(extensions.notloaded())))
71 > EOF
71 > EOF
72 $ hg --config extensions.badexts=showbadexts.py showbadexts 2>&1 | grep '^BADEXTS'
72 $ hg --config extensions.badexts=showbadexts.py showbadexts 2>&1 | grep '^BADEXTS'
73 BADEXTS: badext badext2
73 BADEXTS: badext badext2
74
74
75 show traceback for ImportError of hgext.name if devel.debug.extensions is set
75 show traceback for ImportError of hgext.name if devel.debug.extensions is set
76
76
77 $ (hg help help --traceback --debug --config devel.debug.extensions=yes 2>&1) \
77 $ (hg help help --traceback --debug --config devel.debug.extensions=yes 2>&1) \
78 > | grep -v '^ ' \
78 > | grep -v '^ ' \
79 > | egrep 'extension..[^p]|^Exception|Traceback|ImportError|not import'
79 > | egrep 'extension..[^p]|^Exception|Traceback|ImportError|not import'
80 debug.extensions: loading extensions
81 debug.extensions: - processing 5 entries
82 debug.extensions: - loading extension: 'gpg'
83 debug.extensions: > 'gpg' extension loaded in * (glob)
84 debug.extensions: - validating extension tables: 'gpg'
85 debug.extensions: - invoking registered callbacks: 'gpg'
86 debug.extensions: > callbacks completed in * (glob)
87 debug.extensions: - loading extension: 'badext'
80 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
88 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
81 Traceback (most recent call last):
89 Traceback (most recent call last):
82 Exception: bit bucket overflow
90 Exception: bit bucket overflow
83 could not import hgext.badext2 (No module named *badext2): trying hgext3rd.badext2 (glob)
91 debug.extensions: - loading extension: 'baddocext'
92 debug.extensions: > 'baddocext' extension loaded in * (glob)
93 debug.extensions: - validating extension tables: 'baddocext'
94 debug.extensions: - invoking registered callbacks: 'baddocext'
95 debug.extensions: > callbacks completed in * (glob)
96 debug.extensions: - loading extension: 'badext2'
97 debug.extensions: - could not import hgext.badext2 (No module named badext2): trying hgext3rd.badext2
84 Traceback (most recent call last):
98 Traceback (most recent call last):
85 ImportError: No module named *badext2 (glob)
99 ImportError: No module named *badext2 (glob)
86 could not import hgext3rd.badext2 (No module named *badext2): trying badext2 (glob)
100 debug.extensions: - could not import hgext3rd.badext2 (No module named badext2): trying badext2
87 Traceback (most recent call last):
101 Traceback (most recent call last):
88 ImportError: No module named *badext2 (glob)
102 ImportError: No module named *badext2 (glob)
89 *** failed to import extension badext2: No module named badext2
103 *** failed to import extension badext2: No module named badext2
90 Traceback (most recent call last):
104 Traceback (most recent call last):
91 ImportError: No module named badext2
105 ImportError: No module named badext2
106 debug.extensions: > loaded 2 extensions, total time * (glob)
107 debug.extensions: - loading configtable attributes
108 debug.extensions: - executing uisetup hooks
109 debug.extensions: - running uisetup for 'gpg'
110 debug.extensions: > uisetup for 'gpg' took * (glob)
111 debug.extensions: - running uisetup for 'baddocext'
112 debug.extensions: > uisetup for 'baddocext' took * (glob)
113 debug.extensions: - executing extsetup hooks
114 debug.extensions: - running extsetup for 'gpg'
115 debug.extensions: > extsetup for 'gpg' took * (glob)
116 debug.extensions: - running extsetup for 'baddocext'
117 debug.extensions: > extsetup for 'baddocext' took * (glob)
118 debug.extensions: - executing remaining aftercallbacks
119 debug.extensions: > remaining aftercallbacks completed in * (glob)
120 debug.extensions: - loading extension registration objects
121 debug.extensions: > extension registration object loading took * (glob)
122 debug.extensions: extension loading complete
92
123
93 confirm that there's no crash when an extension's documentation is bad
124 confirm that there's no crash when an extension's documentation is bad
94
125
95 $ hg help --keyword baddocext
126 $ hg help --keyword baddocext
96 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
127 *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow
97 *** failed to import extension badext2: No module named badext2
128 *** failed to import extension badext2: No module named badext2
98 Topics:
129 Topics:
99
130
100 extensions Using Additional Features
131 extensions Using Additional Features
General Comments 0
You need to be logged in to leave comments. Login now