py3: replace sys.executable with pycompat.sysexecutable...
Pulkit Goyal
r30669:10b17ed9 default
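For context on this change: Mercurial handles most paths and command-line values as bytes, while Python 3's sys.executable is a unicode str, so the interpreter path is taken from pycompat, which normalizes it per Python version. A minimal sketch of the idea (an illustration, not the actual pycompat source):

    import os
    import sys

    if sys.version_info[0] >= 3:
        # assumption: encode the interpreter path with the filesystem encoding
        sysexecutable = os.fsencode(sys.executable) if sys.executable else b''
    else:
        sysexecutable = sys.executable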
diff --git a/mercurial/chgserver.py b/mercurial/chgserver.py
@@ -1,644 +1,644 @@
1 # chgserver.py - command server extension for cHg
1 # chgserver.py - command server extension for cHg
2 #
2 #
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
3 # Copyright 2011 Yuya Nishihara <yuya@tcha.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """command server extension for cHg
8 """command server extension for cHg
9
9
10 'S' channel (read/write)
10 'S' channel (read/write)
11 propagate ui.system() request to client
11 propagate ui.system() request to client
12
12
13 'attachio' command
13 'attachio' command
14 attach client's stdio passed by sendmsg()
14 attach client's stdio passed by sendmsg()
15
15
16 'chdir' command
16 'chdir' command
17 change current directory
17 change current directory
18
18
19 'getpager' command
19 'getpager' command
20 checks if pager is enabled and which pager should be executed
20 checks if pager is enabled and which pager should be executed
21
21
22 'setenv' command
22 'setenv' command
23 replace os.environ completely
23 replace os.environ completely
24
24
25 'setumask' command
25 'setumask' command
26 set umask
26 set umask
27
27
28 'validate' command
28 'validate' command
29 reload the config and check if the server is up to date
29 reload the config and check if the server is up to date
30
30
31 Config
31 Config
32 ------
32 ------
33
33
34 ::
34 ::
35
35
36 [chgserver]
36 [chgserver]
37 idletimeout = 3600 # seconds, after which an idle server will exit
37 idletimeout = 3600 # seconds, after which an idle server will exit
38 skiphash = False # whether to skip config or env change checks
38 skiphash = False # whether to skip config or env change checks
39 """
39 """
40
40
41 from __future__ import absolute_import
41 from __future__ import absolute_import
42
42
43 import errno
43 import errno
44 import hashlib
44 import hashlib
45 import inspect
45 import inspect
46 import os
46 import os
47 import re
47 import re
48 import signal
48 import signal
49 import struct
49 import struct
50 import sys
51 import time
50 import time
52
51
53 from .i18n import _
52 from .i18n import _
54
53
55 from . import (
54 from . import (
56 cmdutil,
55 cmdutil,
57 commandserver,
56 commandserver,
58 encoding,
57 encoding,
59 error,
58 error,
60 extensions,
59 extensions,
61 osutil,
60 osutil,
61 pycompat,
62 util,
62 util,
63 )
63 )
64
64
65 _log = commandserver.log
65 _log = commandserver.log
66
66
67 def _hashlist(items):
67 def _hashlist(items):
68 """return sha1 hexdigest for a list"""
68 """return sha1 hexdigest for a list"""
69 return hashlib.sha1(str(items)).hexdigest()
69 return hashlib.sha1(str(items)).hexdigest()
70
70
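_hashlist stringifies the whole list before hashing, so ordering matters; that is why _confighash below sorts the environment items before hashing them. A quick check (under Python 2, which this file still targets at this revision):

    assert _hashlist(['a', 'b']) != _hashlist(['b', 'a'])
    assert _hashlist([('PATH', '/bin')]) != _hashlist([('PATH', '/usr/bin')])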
71 # sensitive config sections affecting confighash
71 # sensitive config sections affecting confighash
72 _configsections = [
72 _configsections = [
73 'alias', # affects global state commands.table
73 'alias', # affects global state commands.table
74 'extdiff', # uisetup will register new commands
74 'extdiff', # uisetup will register new commands
75 'extensions',
75 'extensions',
76 ]
76 ]
77
77
78 # sensitive environment variables affecting confighash
78 # sensitive environment variables affecting confighash
79 _envre = re.compile(r'''\A(?:
79 _envre = re.compile(r'''\A(?:
80 CHGHG
80 CHGHG
81 |HG(?:[A-Z].*)?
81 |HG(?:[A-Z].*)?
82 |LANG(?:UAGE)?
82 |LANG(?:UAGE)?
83 |LC_.*
83 |LC_.*
84 |LD_.*
84 |LD_.*
85 |PATH
85 |PATH
86 |PYTHON.*
86 |PYTHON.*
87 |TERM(?:INFO)?
87 |TERM(?:INFO)?
88 |TZ
88 |TZ
89 )\Z''', re.X)
89 )\Z''', re.X)
90
90
91 def _confighash(ui):
91 def _confighash(ui):
92 """return a quick hash for detecting config/env changes
92 """return a quick hash for detecting config/env changes
93
93
94 confighash is the hash of sensitive config items and environment variables.
94 confighash is the hash of sensitive config items and environment variables.
95
95
96 for chgserver, it is designed that once confighash changes, the server is
96 for chgserver, it is designed that once confighash changes, the server is
97 not qualified to serve its client and should redirect the client to a new
97 not qualified to serve its client and should redirect the client to a new
98 server. different from mtimehash, confighash change will not mark the
98 server. different from mtimehash, confighash change will not mark the
99 server outdated and exit since the user can have different configs at the
99 server outdated and exit since the user can have different configs at the
100 same time.
100 same time.
101 """
101 """
102 sectionitems = []
102 sectionitems = []
103 for section in _configsections:
103 for section in _configsections:
104 sectionitems.append(ui.configitems(section))
104 sectionitems.append(ui.configitems(section))
105 sectionhash = _hashlist(sectionitems)
105 sectionhash = _hashlist(sectionitems)
106 envitems = [(k, v) for k, v in encoding.environ.iteritems()
106 envitems = [(k, v) for k, v in encoding.environ.iteritems()
107 if _envre.match(k)]
107 if _envre.match(k)]
108 envhash = _hashlist(sorted(envitems))
108 envhash = _hashlist(sorted(envitems))
109 return sectionhash[:6] + envhash[:6]
109 return sectionhash[:6] + envhash[:6]
110
110
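To make the environment filter and the hash layout concrete, here is a standalone illustration that reuses the regular expression from above (the environment variable names in the checks are just examples):

    import re

    _envre = re.compile(r'''\A(?:
        CHGHG
        |HG(?:[A-Z].*)?
        |LANG(?:UAGE)?
        |LC_.*
        |LD_.*
        |PATH
        |PYTHON.*
        |TERM(?:INFO)?
        |TZ
        )\Z''', re.X)

    # HGUSER, LC_ALL, PATH and PYTHONPATH are considered sensitive...
    assert _envre.match('HGUSER') and _envre.match('LC_ALL')
    assert _envre.match('PATH') and _envre.match('PYTHONPATH')
    # ...while HOME and EDITOR are not
    assert not _envre.match('HOME') and not _envre.match('EDITOR')

    # _confighash concatenates the first 6 hex digits of the section hash with the
    # first 6 of the environment hash, e.g. 'a1b2c3' + 'd4e5f6' -> 'a1b2c3d4e5f6',
    # and that 12-character string ends up embedded in the socket file name.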
111 def _getmtimepaths(ui):
111 def _getmtimepaths(ui):
112 """get a list of paths that should be checked to detect change
112 """get a list of paths that should be checked to detect change
113
113
114 The list will include:
114 The list will include:
115 - extensions (will not cover all files for complex extensions)
115 - extensions (will not cover all files for complex extensions)
116 - mercurial/__version__.py
116 - mercurial/__version__.py
117 - python binary
117 - python binary
118 """
118 """
119 modules = [m for n, m in extensions.extensions(ui)]
119 modules = [m for n, m in extensions.extensions(ui)]
120 try:
120 try:
121 from . import __version__
121 from . import __version__
122 modules.append(__version__)
122 modules.append(__version__)
123 except ImportError:
123 except ImportError:
124 pass
124 pass
125 files = [sys.executable]
125 files = [pycompat.sysexecutable]
126 for m in modules:
126 for m in modules:
127 try:
127 try:
128 files.append(inspect.getabsfile(m))
128 files.append(inspect.getabsfile(m))
129 except TypeError:
129 except TypeError:
130 pass
130 pass
131 return sorted(set(files))
131 return sorted(set(files))
132
132
133 def _mtimehash(paths):
133 def _mtimehash(paths):
134 """return a quick hash for detecting file changes
134 """return a quick hash for detecting file changes
135
135
136 mtimehash calls stat on given paths and calculate a hash based on size and
136 mtimehash calls stat on given paths and calculate a hash based on size and
137 mtime of each file. mtimehash does not read file content because reading is
137 mtime of each file. mtimehash does not read file content because reading is
138 expensive. therefore it's not 100% reliable for detecting content changes.
138 expensive. therefore it's not 100% reliable for detecting content changes.
139 it's possible to return different hashes for same file contents.
139 it's possible to return different hashes for same file contents.
140 it's also possible to return a same hash for different file contents for
140 it's also possible to return a same hash for different file contents for
141 some carefully crafted situation.
141 some carefully crafted situation.
142
142
143 for chgserver, it is designed that once mtimehash changes, the server is
143 for chgserver, it is designed that once mtimehash changes, the server is
144 considered outdated immediately and should no longer provide service.
144 considered outdated immediately and should no longer provide service.
145
145
146 mtimehash is not included in confighash because we only know the paths of
146 mtimehash is not included in confighash because we only know the paths of
147 extensions after importing them (there is imp.find_module but that faces
147 extensions after importing them (there is imp.find_module but that faces
148 race conditions). We need to calculate confighash without importing.
148 race conditions). We need to calculate confighash without importing.
149 """
149 """
150 def trystat(path):
150 def trystat(path):
151 try:
151 try:
152 st = os.stat(path)
152 st = os.stat(path)
153 return (st.st_mtime, st.st_size)
153 return (st.st_mtime, st.st_size)
154 except OSError:
154 except OSError:
155 # could be ENOENT, EPERM etc. not fatal in any case
155 # could be ENOENT, EPERM etc. not fatal in any case
156 pass
156 pass
157 return _hashlist(map(trystat, paths))[:12]
157 return _hashlist(map(trystat, paths))[:12]
158
158
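As a rough demonstration of the invalidation property described above: touching any watched file flips the 12-character hash. The sketch below is written for Python 3, and mtimehash_sketch is a hypothetical stand-in for _mtimehash, not chg's own code:

    import hashlib
    import os
    import tempfile

    def mtimehash_sketch(paths):
        def trystat(path):
            try:
                st = os.stat(path)
                return (st.st_mtime, st.st_size)
            except OSError:
                return None  # missing/unreadable paths simply contribute None
        items = [trystat(p) for p in paths]
        return hashlib.sha1(str(items).encode('ascii')).hexdigest()[:12]

    fd, path = tempfile.mkstemp()
    os.close(fd)
    before = mtimehash_sketch([path])
    os.utime(path, (0, 0))                       # simulate the file being rebuilt
    assert mtimehash_sketch([path]) != before    # the server would now be outdated
    os.unlink(path)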
159 class hashstate(object):
159 class hashstate(object):
160 """a structure storing confighash, mtimehash, paths used for mtimehash"""
160 """a structure storing confighash, mtimehash, paths used for mtimehash"""
161 def __init__(self, confighash, mtimehash, mtimepaths):
161 def __init__(self, confighash, mtimehash, mtimepaths):
162 self.confighash = confighash
162 self.confighash = confighash
163 self.mtimehash = mtimehash
163 self.mtimehash = mtimehash
164 self.mtimepaths = mtimepaths
164 self.mtimepaths = mtimepaths
165
165
166 @staticmethod
166 @staticmethod
167 def fromui(ui, mtimepaths=None):
167 def fromui(ui, mtimepaths=None):
168 if mtimepaths is None:
168 if mtimepaths is None:
169 mtimepaths = _getmtimepaths(ui)
169 mtimepaths = _getmtimepaths(ui)
170 confighash = _confighash(ui)
170 confighash = _confighash(ui)
171 mtimehash = _mtimehash(mtimepaths)
171 mtimehash = _mtimehash(mtimepaths)
172 _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
172 _log('confighash = %s mtimehash = %s\n' % (confighash, mtimehash))
173 return hashstate(confighash, mtimehash, mtimepaths)
173 return hashstate(confighash, mtimehash, mtimepaths)
174
174
175 # copied from hgext/pager.py:uisetup()
175 # copied from hgext/pager.py:uisetup()
176 def _setuppagercmd(ui, options, cmd):
176 def _setuppagercmd(ui, options, cmd):
177 from . import commands # avoid cycle
177 from . import commands # avoid cycle
178
178
179 if not ui.formatted():
179 if not ui.formatted():
180 return
180 return
181
181
182 p = ui.config("pager", "pager", encoding.environ.get("PAGER"))
182 p = ui.config("pager", "pager", encoding.environ.get("PAGER"))
183 usepager = False
183 usepager = False
184 always = util.parsebool(options['pager'])
184 always = util.parsebool(options['pager'])
185 auto = options['pager'] == 'auto'
185 auto = options['pager'] == 'auto'
186
186
187 if not p:
187 if not p:
188 pass
188 pass
189 elif always:
189 elif always:
190 usepager = True
190 usepager = True
191 elif not auto:
191 elif not auto:
192 usepager = False
192 usepager = False
193 else:
193 else:
194 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
194 attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
195 attend = ui.configlist('pager', 'attend', attended)
195 attend = ui.configlist('pager', 'attend', attended)
196 ignore = ui.configlist('pager', 'ignore')
196 ignore = ui.configlist('pager', 'ignore')
197 cmds, _ = cmdutil.findcmd(cmd, commands.table)
197 cmds, _ = cmdutil.findcmd(cmd, commands.table)
198
198
199 for cmd in cmds:
199 for cmd in cmds:
200 var = 'attend-%s' % cmd
200 var = 'attend-%s' % cmd
201 if ui.config('pager', var):
201 if ui.config('pager', var):
202 usepager = ui.configbool('pager', var)
202 usepager = ui.configbool('pager', var)
203 break
203 break
204 if (cmd in attend or
204 if (cmd in attend or
205 (cmd not in ignore and not attend)):
205 (cmd not in ignore and not attend)):
206 usepager = True
206 usepager = True
207 break
207 break
208
208
209 if usepager:
209 if usepager:
210 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
210 ui.setconfig('ui', 'formatted', ui.formatted(), 'pager')
211 ui.setconfig('ui', 'interactive', False, 'pager')
211 ui.setconfig('ui', 'interactive', False, 'pager')
212 return p
212 return p
213
213
214 def _newchgui(srcui, csystem):
214 def _newchgui(srcui, csystem):
215 class chgui(srcui.__class__):
215 class chgui(srcui.__class__):
216 def __init__(self, src=None):
216 def __init__(self, src=None):
217 super(chgui, self).__init__(src)
217 super(chgui, self).__init__(src)
218 if src:
218 if src:
219 self._csystem = getattr(src, '_csystem', csystem)
219 self._csystem = getattr(src, '_csystem', csystem)
220 else:
220 else:
221 self._csystem = csystem
221 self._csystem = csystem
222
222
223 def system(self, cmd, environ=None, cwd=None, onerr=None,
223 def system(self, cmd, environ=None, cwd=None, onerr=None,
224 errprefix=None):
224 errprefix=None):
225 # fallback to the original system method if the output needs to be
225 # fallback to the original system method if the output needs to be
226 # captured (to self._buffers), or the output stream is not stdout
226 # captured (to self._buffers), or the output stream is not stdout
227 # (e.g. stderr, cStringIO), because the chg client is not aware of
227 # (e.g. stderr, cStringIO), because the chg client is not aware of
228 # these situations and will behave differently (write to stdout).
228 # these situations and will behave differently (write to stdout).
229 if (any(s[1] for s in self._bufferstates)
229 if (any(s[1] for s in self._bufferstates)
230 or not util.safehasattr(self.fout, 'fileno')
230 or not util.safehasattr(self.fout, 'fileno')
231 or self.fout.fileno() != util.stdout.fileno()):
231 or self.fout.fileno() != util.stdout.fileno()):
232 return super(chgui, self).system(cmd, environ, cwd, onerr,
232 return super(chgui, self).system(cmd, environ, cwd, onerr,
233 errprefix)
233 errprefix)
234 # copied from mercurial/util.py:system()
234 # copied from mercurial/util.py:system()
235 self.flush()
235 self.flush()
236 def py2shell(val):
236 def py2shell(val):
237 if val is None or val is False:
237 if val is None or val is False:
238 return '0'
238 return '0'
239 if val is True:
239 if val is True:
240 return '1'
240 return '1'
241 return str(val)
241 return str(val)
242 env = encoding.environ.copy()
242 env = encoding.environ.copy()
243 if environ:
243 if environ:
244 env.update((k, py2shell(v)) for k, v in environ.iteritems())
244 env.update((k, py2shell(v)) for k, v in environ.iteritems())
245 env['HG'] = util.hgexecutable()
245 env['HG'] = util.hgexecutable()
246 rc = self._csystem(cmd, env, cwd)
246 rc = self._csystem(cmd, env, cwd)
247 if rc and onerr:
247 if rc and onerr:
248 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
248 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
249 util.explainexit(rc)[0])
249 util.explainexit(rc)[0])
250 if errprefix:
250 if errprefix:
251 errmsg = '%s: %s' % (errprefix, errmsg)
251 errmsg = '%s: %s' % (errprefix, errmsg)
252 raise onerr(errmsg)
252 raise onerr(errmsg)
253 return rc
253 return rc
254
254
255 return chgui(srcui)
255 return chgui(srcui)
256
256
257 def _loadnewui(srcui, args):
257 def _loadnewui(srcui, args):
258 from . import dispatch # avoid cycle
258 from . import dispatch # avoid cycle
259
259
260 newui = srcui.__class__.load()
260 newui = srcui.__class__.load()
261 for a in ['fin', 'fout', 'ferr', 'environ']:
261 for a in ['fin', 'fout', 'ferr', 'environ']:
262 setattr(newui, a, getattr(srcui, a))
262 setattr(newui, a, getattr(srcui, a))
263 if util.safehasattr(srcui, '_csystem'):
263 if util.safehasattr(srcui, '_csystem'):
264 newui._csystem = srcui._csystem
264 newui._csystem = srcui._csystem
265
265
266 # command line args
266 # command line args
267 args = args[:]
267 args = args[:]
268 dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args))
268 dispatch._parseconfig(newui, dispatch._earlygetopt(['--config'], args))
269
269
270 # stolen from tortoisehg.util.copydynamicconfig()
270 # stolen from tortoisehg.util.copydynamicconfig()
271 for section, name, value in srcui.walkconfig():
271 for section, name, value in srcui.walkconfig():
272 source = srcui.configsource(section, name)
272 source = srcui.configsource(section, name)
273 if ':' in source or source == '--config':
273 if ':' in source or source == '--config':
274 # path:line or command line
274 # path:line or command line
275 continue
275 continue
276 newui.setconfig(section, name, value, source)
276 newui.setconfig(section, name, value, source)
277
277
278 # load wd and repo config, copied from dispatch.py
278 # load wd and repo config, copied from dispatch.py
279 cwds = dispatch._earlygetopt(['--cwd'], args)
279 cwds = dispatch._earlygetopt(['--cwd'], args)
280 cwd = cwds and os.path.realpath(cwds[-1]) or None
280 cwd = cwds and os.path.realpath(cwds[-1]) or None
281 rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
281 rpath = dispatch._earlygetopt(["-R", "--repository", "--repo"], args)
282 path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
282 path, newlui = dispatch._getlocal(newui, rpath, wd=cwd)
283
283
284 return (newui, newlui)
284 return (newui, newlui)
285
285
286 class channeledsystem(object):
286 class channeledsystem(object):
287 """Propagate ui.system() request in the following format:
287 """Propagate ui.system() request in the following format:
288
288
289 payload length (unsigned int),
289 payload length (unsigned int),
290 cmd, '\0',
290 cmd, '\0',
291 cwd, '\0',
291 cwd, '\0',
292 envkey, '=', val, '\0',
292 envkey, '=', val, '\0',
293 ...
293 ...
294 envkey, '=', val
294 envkey, '=', val
295
295
296 and waits:
296 and waits:
297
297
298 exitcode length (unsigned int),
298 exitcode length (unsigned int),
299 exitcode (int)
299 exitcode (int)
300 """
300 """
301 def __init__(self, in_, out, channel):
301 def __init__(self, in_, out, channel):
302 self.in_ = in_
302 self.in_ = in_
303 self.out = out
303 self.out = out
304 self.channel = channel
304 self.channel = channel
305
305
306 def __call__(self, cmd, environ, cwd):
306 def __call__(self, cmd, environ, cwd):
307 args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')]
307 args = [util.quotecommand(cmd), os.path.abspath(cwd or '.')]
308 args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
308 args.extend('%s=%s' % (k, v) for k, v in environ.iteritems())
309 data = '\0'.join(args)
309 data = '\0'.join(args)
310 self.out.write(struct.pack('>cI', self.channel, len(data)))
310 self.out.write(struct.pack('>cI', self.channel, len(data)))
311 self.out.write(data)
311 self.out.write(data)
312 self.out.flush()
312 self.out.flush()
313
313
314 length = self.in_.read(4)
314 length = self.in_.read(4)
315 length, = struct.unpack('>I', length)
315 length, = struct.unpack('>I', length)
316 if length != 4:
316 if length != 4:
317 raise error.Abort(_('invalid response'))
317 raise error.Abort(_('invalid response'))
318 rc, = struct.unpack('>i', self.in_.read(4))
318 rc, = struct.unpack('>i', self.in_.read(4))
319 return rc
319 return rc
320
320
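To make the 'S' channel framing concrete, this is how a request would be packed on the wire, following __call__ above (the command, directory and environment values here are made up):

    import struct

    cmd, cwd = b'grep foo | less', b'/repo'
    environ = {b'LANG': b'C', b'PAGER': b'less'}
    data = b'\0'.join([cmd, cwd] + [k + b'=' + v
                                    for k, v in sorted(environ.items())])
    frame = struct.pack('>cI', b'S', len(data)) + data
    # frame = b'S' + 4-byte big-endian length + 'cmd\0cwd\0key=val\0key=val'
    # the reply is a 4-byte length (always 4) followed by a big-endian signed exit code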
321 _iochannels = [
321 _iochannels = [
322 # server.ch, ui.fp, mode
322 # server.ch, ui.fp, mode
323 ('cin', 'fin', 'rb'),
323 ('cin', 'fin', 'rb'),
324 ('cout', 'fout', 'wb'),
324 ('cout', 'fout', 'wb'),
325 ('cerr', 'ferr', 'wb'),
325 ('cerr', 'ferr', 'wb'),
326 ]
326 ]
327
327
328 class chgcmdserver(commandserver.server):
328 class chgcmdserver(commandserver.server):
329 def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
329 def __init__(self, ui, repo, fin, fout, sock, hashstate, baseaddress):
330 super(chgcmdserver, self).__init__(
330 super(chgcmdserver, self).__init__(
331 _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout)
331 _newchgui(ui, channeledsystem(fin, fout, 'S')), repo, fin, fout)
332 self.clientsock = sock
332 self.clientsock = sock
333 self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio"
333 self._oldios = [] # original (self.ch, ui.fp, fd) before "attachio"
334 self.hashstate = hashstate
334 self.hashstate = hashstate
335 self.baseaddress = baseaddress
335 self.baseaddress = baseaddress
336 if hashstate is not None:
336 if hashstate is not None:
337 self.capabilities = self.capabilities.copy()
337 self.capabilities = self.capabilities.copy()
338 self.capabilities['validate'] = chgcmdserver.validate
338 self.capabilities['validate'] = chgcmdserver.validate
339
339
340 def cleanup(self):
340 def cleanup(self):
341 super(chgcmdserver, self).cleanup()
341 super(chgcmdserver, self).cleanup()
342 # dispatch._runcatch() does not flush outputs if exception is not
342 # dispatch._runcatch() does not flush outputs if exception is not
343 # handled by dispatch._dispatch()
343 # handled by dispatch._dispatch()
344 self.ui.flush()
344 self.ui.flush()
345 self._restoreio()
345 self._restoreio()
346
346
347 def attachio(self):
347 def attachio(self):
348 """Attach to client's stdio passed via unix domain socket; all
348 """Attach to client's stdio passed via unix domain socket; all
349 channels except cresult will no longer be used
349 channels except cresult will no longer be used
350 """
350 """
351 # tell client to sendmsg() with 1-byte payload, which makes it
351 # tell client to sendmsg() with 1-byte payload, which makes it
352 # distinctive from "attachio\n" command consumed by client.read()
352 # distinctive from "attachio\n" command consumed by client.read()
353 self.clientsock.sendall(struct.pack('>cI', 'I', 1))
353 self.clientsock.sendall(struct.pack('>cI', 'I', 1))
354 clientfds = osutil.recvfds(self.clientsock.fileno())
354 clientfds = osutil.recvfds(self.clientsock.fileno())
355 _log('received fds: %r\n' % clientfds)
355 _log('received fds: %r\n' % clientfds)
356
356
357 ui = self.ui
357 ui = self.ui
358 ui.flush()
358 ui.flush()
359 first = self._saveio()
359 first = self._saveio()
360 for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
360 for fd, (cn, fn, mode) in zip(clientfds, _iochannels):
361 assert fd > 0
361 assert fd > 0
362 fp = getattr(ui, fn)
362 fp = getattr(ui, fn)
363 os.dup2(fd, fp.fileno())
363 os.dup2(fd, fp.fileno())
364 os.close(fd)
364 os.close(fd)
365 if not first:
365 if not first:
366 continue
366 continue
367 # reset buffering mode when client is first attached. as we want
367 # reset buffering mode when client is first attached. as we want
368 # to see output immediately on pager, the mode stays unchanged
368 # to see output immediately on pager, the mode stays unchanged
369 # when client re-attached. ferr is unchanged because it should
369 # when client re-attached. ferr is unchanged because it should
370 # be unbuffered no matter if it is a tty or not.
370 # be unbuffered no matter if it is a tty or not.
371 if fn == 'ferr':
371 if fn == 'ferr':
372 newfp = fp
372 newfp = fp
373 else:
373 else:
374 # make it line buffered explicitly because the default is
374 # make it line buffered explicitly because the default is
375 # decided on first write(), where fout could be a pager.
375 # decided on first write(), where fout could be a pager.
376 if fp.isatty():
376 if fp.isatty():
377 bufsize = 1 # line buffered
377 bufsize = 1 # line buffered
378 else:
378 else:
379 bufsize = -1 # system default
379 bufsize = -1 # system default
380 newfp = os.fdopen(fp.fileno(), mode, bufsize)
380 newfp = os.fdopen(fp.fileno(), mode, bufsize)
381 setattr(ui, fn, newfp)
381 setattr(ui, fn, newfp)
382 setattr(self, cn, newfp)
382 setattr(self, cn, newfp)
383
383
384 self.cresult.write(struct.pack('>i', len(clientfds)))
384 self.cresult.write(struct.pack('>i', len(clientfds)))
385
385
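The handshake frame sent just before the descriptors are transferred is easy to spell out (standalone check; the values mirror the sendall() call above):

    import struct
    assert struct.pack('>cI', b'I', 1) == b'I\x00\x00\x00\x01'
    # channel 'I' plus a 4-byte payload length of 1; the client then answers with a
    # sendmsg() carrying the three descriptors matched against _iochannels above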
386 def _saveio(self):
386 def _saveio(self):
387 if self._oldios:
387 if self._oldios:
388 return False
388 return False
389 ui = self.ui
389 ui = self.ui
390 for cn, fn, _mode in _iochannels:
390 for cn, fn, _mode in _iochannels:
391 ch = getattr(self, cn)
391 ch = getattr(self, cn)
392 fp = getattr(ui, fn)
392 fp = getattr(ui, fn)
393 fd = os.dup(fp.fileno())
393 fd = os.dup(fp.fileno())
394 self._oldios.append((ch, fp, fd))
394 self._oldios.append((ch, fp, fd))
395 return True
395 return True
396
396
397 def _restoreio(self):
397 def _restoreio(self):
398 ui = self.ui
398 ui = self.ui
399 for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
399 for (ch, fp, fd), (cn, fn, _mode) in zip(self._oldios, _iochannels):
400 newfp = getattr(ui, fn)
400 newfp = getattr(ui, fn)
401 # close newfp while it's associated with client; otherwise it
401 # close newfp while it's associated with client; otherwise it
402 # would be closed when newfp is deleted
402 # would be closed when newfp is deleted
403 if newfp is not fp:
403 if newfp is not fp:
404 newfp.close()
404 newfp.close()
405 # restore original fd: fp is open again
405 # restore original fd: fp is open again
406 os.dup2(fd, fp.fileno())
406 os.dup2(fd, fp.fileno())
407 os.close(fd)
407 os.close(fd)
408 setattr(self, cn, ch)
408 setattr(self, cn, ch)
409 setattr(ui, fn, fp)
409 setattr(ui, fn, fp)
410 del self._oldios[:]
410 del self._oldios[:]
411
411
412 def validate(self):
412 def validate(self):
413 """Reload the config and check if the server is up to date
413 """Reload the config and check if the server is up to date
414
414
415 Read a list of '\0' separated arguments.
415 Read a list of '\0' separated arguments.
416 Write a non-empty list of '\0' separated instruction strings or '\0'
416 Write a non-empty list of '\0' separated instruction strings or '\0'
417 if the list is empty.
417 if the list is empty.
418 An instruction string could be either:
418 An instruction string could be either:
419 - "unlink $path", the client should unlink the path to stop the
419 - "unlink $path", the client should unlink the path to stop the
420 outdated server.
420 outdated server.
421 - "redirect $path", the client should attempt to connect to $path
421 - "redirect $path", the client should attempt to connect to $path
422 first. If it does not work, start a new server. It implies
422 first. If it does not work, start a new server. It implies
423 "reconnect".
423 "reconnect".
424 - "exit $n", the client should exit directly with code n.
424 - "exit $n", the client should exit directly with code n.
425 This may happen if we cannot parse the config.
425 This may happen if we cannot parse the config.
426 - "reconnect", the client should close the connection and
426 - "reconnect", the client should close the connection and
427 reconnect.
427 reconnect.
428 If neither "reconnect" nor "redirect" is included in the instruction
428 If neither "reconnect" nor "redirect" is included in the instruction
429 list, the client can continue with this server after completing all
429 list, the client can continue with this server after completing all
430 the instructions.
430 the instructions.
431 """
431 """
432 from . import dispatch # avoid cycle
432 from . import dispatch # avoid cycle
433
433
434 args = self._readlist()
434 args = self._readlist()
435 try:
435 try:
436 self.ui, lui = _loadnewui(self.ui, args)
436 self.ui, lui = _loadnewui(self.ui, args)
437 except error.ParseError as inst:
437 except error.ParseError as inst:
438 dispatch._formatparse(self.ui.warn, inst)
438 dispatch._formatparse(self.ui.warn, inst)
439 self.ui.flush()
439 self.ui.flush()
440 self.cresult.write('exit 255')
440 self.cresult.write('exit 255')
441 return
441 return
442 newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
442 newhash = hashstate.fromui(lui, self.hashstate.mtimepaths)
443 insts = []
443 insts = []
444 if newhash.mtimehash != self.hashstate.mtimehash:
444 if newhash.mtimehash != self.hashstate.mtimehash:
445 addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
445 addr = _hashaddress(self.baseaddress, self.hashstate.confighash)
446 insts.append('unlink %s' % addr)
446 insts.append('unlink %s' % addr)
447 # mtimehash is empty if one or more extensions fail to load.
447 # mtimehash is empty if one or more extensions fail to load.
448 # to be compatible with hg, still serve the client this time.
448 # to be compatible with hg, still serve the client this time.
449 if self.hashstate.mtimehash:
449 if self.hashstate.mtimehash:
450 insts.append('reconnect')
450 insts.append('reconnect')
451 if newhash.confighash != self.hashstate.confighash:
451 if newhash.confighash != self.hashstate.confighash:
452 addr = _hashaddress(self.baseaddress, newhash.confighash)
452 addr = _hashaddress(self.baseaddress, newhash.confighash)
453 insts.append('redirect %s' % addr)
453 insts.append('redirect %s' % addr)
454 _log('validate: %s\n' % insts)
454 _log('validate: %s\n' % insts)
455 self.cresult.write('\0'.join(insts) or '\0')
455 self.cresult.write('\0'.join(insts) or '\0')
456
456
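For example, after Mercurial itself has been upgraded, the client might receive something like the following on the result channel (paths and hashes are hypothetical):

    insts = ['unlink /tmp/chgdir/server-0123456789ab',  # stale socket of the old binary
             'reconnect']
    payload = '\0'.join(insts) or '\0'   # a lone '\0' means "keep using this server"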
457 def chdir(self):
457 def chdir(self):
458 """Change current directory
458 """Change current directory
459
459
460 Note that the behavior of --cwd option is bit different from this.
460 Note that the behavior of --cwd option is bit different from this.
461 It does not affect --config parameter.
461 It does not affect --config parameter.
462 """
462 """
463 path = self._readstr()
463 path = self._readstr()
464 if not path:
464 if not path:
465 return
465 return
466 _log('chdir to %r\n' % path)
466 _log('chdir to %r\n' % path)
467 os.chdir(path)
467 os.chdir(path)
468
468
469 def setumask(self):
469 def setumask(self):
470 """Change umask"""
470 """Change umask"""
471 mask = struct.unpack('>I', self._read(4))[0]
471 mask = struct.unpack('>I', self._read(4))[0]
472 _log('setumask %r\n' % mask)
472 _log('setumask %r\n' % mask)
473 os.umask(mask)
473 os.umask(mask)
474
474
475 def getpager(self):
475 def getpager(self):
476 """Read cmdargs and write pager command to r-channel if enabled
476 """Read cmdargs and write pager command to r-channel if enabled
477
477
478 If pager isn't enabled, this writes '\0' because channeledoutput
478 If pager isn't enabled, this writes '\0' because channeledoutput
479 does not allow to write empty data.
479 does not allow to write empty data.
480 """
480 """
481 from . import dispatch # avoid cycle
481 from . import dispatch # avoid cycle
482
482
483 args = self._readlist()
483 args = self._readlist()
484 try:
484 try:
485 cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui,
485 cmd, _func, args, options, _cmdoptions = dispatch._parse(self.ui,
486 args)
486 args)
487 except (error.Abort, error.AmbiguousCommand, error.CommandError,
487 except (error.Abort, error.AmbiguousCommand, error.CommandError,
488 error.UnknownCommand):
488 error.UnknownCommand):
489 cmd = None
489 cmd = None
490 options = {}
490 options = {}
491 if not cmd or 'pager' not in options:
491 if not cmd or 'pager' not in options:
492 self.cresult.write('\0')
492 self.cresult.write('\0')
493 return
493 return
494
494
495 pagercmd = _setuppagercmd(self.ui, options, cmd)
495 pagercmd = _setuppagercmd(self.ui, options, cmd)
496 if pagercmd:
496 if pagercmd:
497 # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
497 # Python's SIGPIPE is SIG_IGN by default. change to SIG_DFL so
498 # we can exit if the pipe to the pager is closed
498 # we can exit if the pipe to the pager is closed
499 if util.safehasattr(signal, 'SIGPIPE') and \
499 if util.safehasattr(signal, 'SIGPIPE') and \
500 signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
500 signal.getsignal(signal.SIGPIPE) == signal.SIG_IGN:
501 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
501 signal.signal(signal.SIGPIPE, signal.SIG_DFL)
502 self.cresult.write(pagercmd)
502 self.cresult.write(pagercmd)
503 else:
503 else:
504 self.cresult.write('\0')
504 self.cresult.write('\0')
505
505
506 def runcommand(self):
506 def runcommand(self):
507 return super(chgcmdserver, self).runcommand()
507 return super(chgcmdserver, self).runcommand()
508
508
509 def setenv(self):
509 def setenv(self):
510 """Clear and update os.environ
510 """Clear and update os.environ
511
511
512 Note that not all variables can make an effect on the running process.
512 Note that not all variables can make an effect on the running process.
513 """
513 """
514 l = self._readlist()
514 l = self._readlist()
515 try:
515 try:
516 newenv = dict(s.split('=', 1) for s in l)
516 newenv = dict(s.split('=', 1) for s in l)
517 except ValueError:
517 except ValueError:
518 raise ValueError('unexpected value in setenv request')
518 raise ValueError('unexpected value in setenv request')
519 _log('setenv: %r\n' % sorted(newenv.keys()))
519 _log('setenv: %r\n' % sorted(newenv.keys()))
520 encoding.environ.clear()
520 encoding.environ.clear()
521 encoding.environ.update(newenv)
521 encoding.environ.update(newenv)
522
522
523 capabilities = commandserver.server.capabilities.copy()
523 capabilities = commandserver.server.capabilities.copy()
524 capabilities.update({'attachio': attachio,
524 capabilities.update({'attachio': attachio,
525 'chdir': chdir,
525 'chdir': chdir,
526 'getpager': getpager,
526 'getpager': getpager,
527 'runcommand': runcommand,
527 'runcommand': runcommand,
528 'setenv': setenv,
528 'setenv': setenv,
529 'setumask': setumask})
529 'setumask': setumask})
530
530
531 def _tempaddress(address):
531 def _tempaddress(address):
532 return '%s.%d.tmp' % (address, os.getpid())
532 return '%s.%d.tmp' % (address, os.getpid())
533
533
534 def _hashaddress(address, hashstr):
534 def _hashaddress(address, hashstr):
535 # if the basename of address contains '.', use only the left part. this
535 # if the basename of address contains '.', use only the left part. this
536 # makes it possible for the client to pass 'server.tmp$PID' and follow by
536 # makes it possible for the client to pass 'server.tmp$PID' and follow by
537 # an atomic rename to avoid locking when spawning new servers.
537 # an atomic rename to avoid locking when spawning new servers.
538 dirname, basename = os.path.split(address)
538 dirname, basename = os.path.split(address)
539 basename = basename.split('.', 1)[0]
539 basename = basename.split('.', 1)[0]
540 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
540 return '%s-%s' % (os.path.join(dirname, basename), hashstr)
541
541
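A quick worked example of the address scheme, using the two helpers above (the base path and hash are hypothetical):

    base = '/tmp/chgdir/server'
    assert _hashaddress(base, 'a1b2c3d4e5f6') == '/tmp/chgdir/server-a1b2c3d4e5f6'
    # a temporary name like 'server.1234.tmp' collapses back to the same address
    assert _hashaddress(base + '.1234.tmp', 'a1b2c3d4e5f6') == '/tmp/chgdir/server-a1b2c3d4e5f6'
    # _tempaddress(base) would give '/tmp/chgdir/server.<pid>.tmp'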
542 class chgunixservicehandler(object):
542 class chgunixservicehandler(object):
543 """Set of operations for chg services"""
543 """Set of operations for chg services"""
544
544
545 pollinterval = 1 # [sec]
545 pollinterval = 1 # [sec]
546
546
547 def __init__(self, ui):
547 def __init__(self, ui):
548 self.ui = ui
548 self.ui = ui
549 self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
549 self._idletimeout = ui.configint('chgserver', 'idletimeout', 3600)
550 self._lastactive = time.time()
550 self._lastactive = time.time()
551
551
552 def bindsocket(self, sock, address):
552 def bindsocket(self, sock, address):
553 self._inithashstate(address)
553 self._inithashstate(address)
554 self._checkextensions()
554 self._checkextensions()
555 self._bind(sock)
555 self._bind(sock)
556 self._createsymlink()
556 self._createsymlink()
557
557
558 def _inithashstate(self, address):
558 def _inithashstate(self, address):
559 self._baseaddress = address
559 self._baseaddress = address
560 if self.ui.configbool('chgserver', 'skiphash', False):
560 if self.ui.configbool('chgserver', 'skiphash', False):
561 self._hashstate = None
561 self._hashstate = None
562 self._realaddress = address
562 self._realaddress = address
563 return
563 return
564 self._hashstate = hashstate.fromui(self.ui)
564 self._hashstate = hashstate.fromui(self.ui)
565 self._realaddress = _hashaddress(address, self._hashstate.confighash)
565 self._realaddress = _hashaddress(address, self._hashstate.confighash)
566
566
567 def _checkextensions(self):
567 def _checkextensions(self):
568 if not self._hashstate:
568 if not self._hashstate:
569 return
569 return
570 if extensions.notloaded():
570 if extensions.notloaded():
571 # one or more extensions failed to load. mtimehash becomes
571 # one or more extensions failed to load. mtimehash becomes
572 # meaningless because we do not know the paths of those extensions.
572 # meaningless because we do not know the paths of those extensions.
573 # set mtimehash to an illegal hash value to invalidate the server.
573 # set mtimehash to an illegal hash value to invalidate the server.
574 self._hashstate.mtimehash = ''
574 self._hashstate.mtimehash = ''
575
575
576 def _bind(self, sock):
576 def _bind(self, sock):
577 # use a unique temp address so we can stat the file and do ownership
577 # use a unique temp address so we can stat the file and do ownership
578 # check later
578 # check later
579 tempaddress = _tempaddress(self._realaddress)
579 tempaddress = _tempaddress(self._realaddress)
580 util.bindunixsocket(sock, tempaddress)
580 util.bindunixsocket(sock, tempaddress)
581 self._socketstat = os.stat(tempaddress)
581 self._socketstat = os.stat(tempaddress)
582 # rename will replace the old socket file if exists atomically. the
582 # rename will replace the old socket file if exists atomically. the
583 # old server will detect ownership change and exit.
583 # old server will detect ownership change and exit.
584 util.rename(tempaddress, self._realaddress)
584 util.rename(tempaddress, self._realaddress)
585
585
586 def _createsymlink(self):
586 def _createsymlink(self):
587 if self._baseaddress == self._realaddress:
587 if self._baseaddress == self._realaddress:
588 return
588 return
589 tempaddress = _tempaddress(self._baseaddress)
589 tempaddress = _tempaddress(self._baseaddress)
590 os.symlink(os.path.basename(self._realaddress), tempaddress)
590 os.symlink(os.path.basename(self._realaddress), tempaddress)
591 util.rename(tempaddress, self._baseaddress)
591 util.rename(tempaddress, self._baseaddress)
592
592
593 def _issocketowner(self):
593 def _issocketowner(self):
594 try:
594 try:
595 stat = os.stat(self._realaddress)
595 stat = os.stat(self._realaddress)
596 return (stat.st_ino == self._socketstat.st_ino and
596 return (stat.st_ino == self._socketstat.st_ino and
597 stat.st_mtime == self._socketstat.st_mtime)
597 stat.st_mtime == self._socketstat.st_mtime)
598 except OSError:
598 except OSError:
599 return False
599 return False
600
600
601 def unlinksocket(self, address):
601 def unlinksocket(self, address):
602 if not self._issocketowner():
602 if not self._issocketowner():
603 return
603 return
604 # it is possible to have a race condition here that we may
604 # it is possible to have a race condition here that we may
605 # remove another server's socket file. but that's okay
605 # remove another server's socket file. but that's okay
606 # since that server will detect and exit automatically and
606 # since that server will detect and exit automatically and
607 # the client will start a new server on demand.
607 # the client will start a new server on demand.
608 try:
608 try:
609 os.unlink(self._realaddress)
609 os.unlink(self._realaddress)
610 except OSError as exc:
610 except OSError as exc:
611 if exc.errno != errno.ENOENT:
611 if exc.errno != errno.ENOENT:
612 raise
612 raise
613
613
614 def printbanner(self, address):
614 def printbanner(self, address):
615 # no "listening at" message should be printed to simulate hg behavior
615 # no "listening at" message should be printed to simulate hg behavior
616 pass
616 pass
617
617
618 def shouldexit(self):
618 def shouldexit(self):
619 if not self._issocketowner():
619 if not self._issocketowner():
620 self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
620 self.ui.debug('%s is not owned, exiting.\n' % self._realaddress)
621 return True
621 return True
622 if time.time() - self._lastactive > self._idletimeout:
622 if time.time() - self._lastactive > self._idletimeout:
623 self.ui.debug('being idle too long. exiting.\n')
623 self.ui.debug('being idle too long. exiting.\n')
624 return True
624 return True
625 return False
625 return False
626
626
627 def newconnection(self):
627 def newconnection(self):
628 self._lastactive = time.time()
628 self._lastactive = time.time()
629
629
630 def createcmdserver(self, repo, conn, fin, fout):
630 def createcmdserver(self, repo, conn, fin, fout):
631 return chgcmdserver(self.ui, repo, fin, fout, conn,
631 return chgcmdserver(self.ui, repo, fin, fout, conn,
632 self._hashstate, self._baseaddress)
632 self._hashstate, self._baseaddress)
633
633
634 def chgunixservice(ui, repo, opts):
634 def chgunixservice(ui, repo, opts):
635 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
635 # CHGINTERNALMARK is temporarily set by chg client to detect if chg will
636 # start another chg. drop it to avoid possible side effects.
636 # start another chg. drop it to avoid possible side effects.
637 if 'CHGINTERNALMARK' in encoding.environ:
637 if 'CHGINTERNALMARK' in encoding.environ:
638 del encoding.environ['CHGINTERNALMARK']
638 del encoding.environ['CHGINTERNALMARK']
639
639
640 if repo:
640 if repo:
641 # one chgserver can serve multiple repos. drop repo information
641 # one chgserver can serve multiple repos. drop repo information
642 ui.setconfig('bundle', 'mainreporoot', '', 'repo')
642 ui.setconfig('bundle', 'mainreporoot', '', 'repo')
643 h = chgunixservicehandler(ui)
643 h = chgunixservicehandler(ui)
644 return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
644 return commandserver.unixforkingservice(ui, repo=None, opts=opts, handler=h)
diff --git a/mercurial/commands.py b/mercurial/commands.py
@@ -1,6601 +1,6601 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import difflib
10 import difflib
11 import errno
11 import errno
12 import os
12 import os
13 import re
13 import re
14 import shlex
14 import shlex
15 import socket
15 import socket
16 import string
16 import string
17 import sys
17 import sys
18 import tempfile
18 import tempfile
19 import time
19 import time
20
20
21 from .i18n import _
21 from .i18n import _
22 from .node import (
22 from .node import (
23 bin,
23 bin,
24 hex,
24 hex,
25 nullhex,
25 nullhex,
26 nullid,
26 nullid,
27 nullrev,
27 nullrev,
28 short,
28 short,
29 )
29 )
30 from . import (
30 from . import (
31 archival,
31 archival,
32 bookmarks,
32 bookmarks,
33 bundle2,
33 bundle2,
34 changegroup,
34 changegroup,
35 cmdutil,
35 cmdutil,
36 copies,
36 copies,
37 destutil,
37 destutil,
38 dirstateguard,
38 dirstateguard,
39 discovery,
39 discovery,
40 encoding,
40 encoding,
41 error,
41 error,
42 exchange,
42 exchange,
43 extensions,
43 extensions,
44 formatter,
44 formatter,
45 graphmod,
45 graphmod,
46 hbisect,
46 hbisect,
47 help,
47 help,
48 hg,
48 hg,
49 lock as lockmod,
49 lock as lockmod,
50 merge as mergemod,
50 merge as mergemod,
51 minirst,
51 minirst,
52 obsolete,
52 obsolete,
53 patch,
53 patch,
54 phases,
54 phases,
55 policy,
55 policy,
56 pvec,
56 pvec,
57 pycompat,
57 pycompat,
58 repair,
58 repair,
59 revlog,
59 revlog,
60 revset,
60 revset,
61 scmutil,
61 scmutil,
62 server,
62 server,
63 sshserver,
63 sshserver,
64 sslutil,
64 sslutil,
65 streamclone,
65 streamclone,
66 templatekw,
66 templatekw,
67 templater,
67 templater,
68 ui as uimod,
68 ui as uimod,
69 util,
69 util,
70 )
70 )
71
71
72 release = lockmod.release
72 release = lockmod.release
73
73
74 table = {}
74 table = {}
75
75
76 command = cmdutil.command(table)
76 command = cmdutil.command(table)
77
77
78 # label constants
78 # label constants
79 # until 3.5, bookmarks.current was the advertised name, not
79 # until 3.5, bookmarks.current was the advertised name, not
80 # bookmarks.active, so we must use both to avoid breaking old
80 # bookmarks.active, so we must use both to avoid breaking old
81 # custom styles
81 # custom styles
82 activebookmarklabel = 'bookmarks.active bookmarks.current'
82 activebookmarklabel = 'bookmarks.active bookmarks.current'
83
83
84 # common command options
84 # common command options
85
85
86 globalopts = [
86 globalopts = [
87 ('R', 'repository', '',
87 ('R', 'repository', '',
88 _('repository root directory or name of overlay bundle file'),
88 _('repository root directory or name of overlay bundle file'),
89 _('REPO')),
89 _('REPO')),
90 ('', 'cwd', '',
90 ('', 'cwd', '',
91 _('change working directory'), _('DIR')),
91 _('change working directory'), _('DIR')),
92 ('y', 'noninteractive', None,
92 ('y', 'noninteractive', None,
93 _('do not prompt, automatically pick the first choice for all prompts')),
93 _('do not prompt, automatically pick the first choice for all prompts')),
94 ('q', 'quiet', None, _('suppress output')),
94 ('q', 'quiet', None, _('suppress output')),
95 ('v', 'verbose', None, _('enable additional output')),
95 ('v', 'verbose', None, _('enable additional output')),
96 ('', 'config', [],
96 ('', 'config', [],
97 _('set/override config option (use \'section.name=value\')'),
97 _('set/override config option (use \'section.name=value\')'),
98 _('CONFIG')),
98 _('CONFIG')),
99 ('', 'debug', None, _('enable debugging output')),
99 ('', 'debug', None, _('enable debugging output')),
100 ('', 'debugger', None, _('start debugger')),
100 ('', 'debugger', None, _('start debugger')),
101 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
101 ('', 'encoding', encoding.encoding, _('set the charset encoding'),
102 _('ENCODE')),
102 _('ENCODE')),
103 ('', 'encodingmode', encoding.encodingmode,
103 ('', 'encodingmode', encoding.encodingmode,
104 _('set the charset encoding mode'), _('MODE')),
104 _('set the charset encoding mode'), _('MODE')),
105 ('', 'traceback', None, _('always print a traceback on exception')),
105 ('', 'traceback', None, _('always print a traceback on exception')),
106 ('', 'time', None, _('time how long the command takes')),
106 ('', 'time', None, _('time how long the command takes')),
107 ('', 'profile', None, _('print command execution profile')),
107 ('', 'profile', None, _('print command execution profile')),
108 ('', 'version', None, _('output version information and exit')),
108 ('', 'version', None, _('output version information and exit')),
109 ('h', 'help', None, _('display help and exit')),
109 ('h', 'help', None, _('display help and exit')),
110 ('', 'hidden', False, _('consider hidden changesets')),
110 ('', 'hidden', False, _('consider hidden changesets')),
111 ]
111 ]
112
112
113 dryrunopts = [('n', 'dry-run', None,
113 dryrunopts = [('n', 'dry-run', None,
114 _('do not perform actions, just print output'))]
114 _('do not perform actions, just print output'))]
115
115
116 remoteopts = [
116 remoteopts = [
117 ('e', 'ssh', '',
117 ('e', 'ssh', '',
118 _('specify ssh command to use'), _('CMD')),
118 _('specify ssh command to use'), _('CMD')),
119 ('', 'remotecmd', '',
119 ('', 'remotecmd', '',
120 _('specify hg command to run on the remote side'), _('CMD')),
120 _('specify hg command to run on the remote side'), _('CMD')),
121 ('', 'insecure', None,
121 ('', 'insecure', None,
122 _('do not verify server certificate (ignoring web.cacerts config)')),
122 _('do not verify server certificate (ignoring web.cacerts config)')),
123 ]
123 ]
124
124
125 walkopts = [
125 walkopts = [
126 ('I', 'include', [],
126 ('I', 'include', [],
127 _('include names matching the given patterns'), _('PATTERN')),
127 _('include names matching the given patterns'), _('PATTERN')),
128 ('X', 'exclude', [],
128 ('X', 'exclude', [],
129 _('exclude names matching the given patterns'), _('PATTERN')),
129 _('exclude names matching the given patterns'), _('PATTERN')),
130 ]
130 ]
131
131
132 commitopts = [
132 commitopts = [
133 ('m', 'message', '',
133 ('m', 'message', '',
134 _('use text as commit message'), _('TEXT')),
134 _('use text as commit message'), _('TEXT')),
135 ('l', 'logfile', '',
135 ('l', 'logfile', '',
136 _('read commit message from file'), _('FILE')),
136 _('read commit message from file'), _('FILE')),
137 ]
137 ]
138
138
139 commitopts2 = [
139 commitopts2 = [
140 ('d', 'date', '',
140 ('d', 'date', '',
141 _('record the specified date as commit date'), _('DATE')),
141 _('record the specified date as commit date'), _('DATE')),
142 ('u', 'user', '',
142 ('u', 'user', '',
143 _('record the specified user as committer'), _('USER')),
143 _('record the specified user as committer'), _('USER')),
144 ]
144 ]
145
145
146 # hidden for now
146 # hidden for now
147 formatteropts = [
147 formatteropts = [
148 ('T', 'template', '',
148 ('T', 'template', '',
149 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
149 _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
150 ]
150 ]
151
151
152 templateopts = [
152 templateopts = [
153 ('', 'style', '',
153 ('', 'style', '',
154 _('display using template map file (DEPRECATED)'), _('STYLE')),
154 _('display using template map file (DEPRECATED)'), _('STYLE')),
155 ('T', 'template', '',
155 ('T', 'template', '',
156 _('display with template'), _('TEMPLATE')),
156 _('display with template'), _('TEMPLATE')),
157 ]
157 ]
158
158
159 logopts = [
159 logopts = [
160 ('p', 'patch', None, _('show patch')),
160 ('p', 'patch', None, _('show patch')),
161 ('g', 'git', None, _('use git extended diff format')),
161 ('g', 'git', None, _('use git extended diff format')),
162 ('l', 'limit', '',
162 ('l', 'limit', '',
163 _('limit number of changes displayed'), _('NUM')),
163 _('limit number of changes displayed'), _('NUM')),
164 ('M', 'no-merges', None, _('do not show merges')),
164 ('M', 'no-merges', None, _('do not show merges')),
165 ('', 'stat', None, _('output diffstat-style summary of changes')),
165 ('', 'stat', None, _('output diffstat-style summary of changes')),
166 ('G', 'graph', None, _("show the revision DAG")),
166 ('G', 'graph', None, _("show the revision DAG")),
167 ] + templateopts
167 ] + templateopts
168
168
169 diffopts = [
169 diffopts = [
170 ('a', 'text', None, _('treat all files as text')),
170 ('a', 'text', None, _('treat all files as text')),
171 ('g', 'git', None, _('use git extended diff format')),
171 ('g', 'git', None, _('use git extended diff format')),
172 ('', 'nodates', None, _('omit dates from diff headers'))
172 ('', 'nodates', None, _('omit dates from diff headers'))
173 ]
173 ]
174
174
175 diffwsopts = [
175 diffwsopts = [
176 ('w', 'ignore-all-space', None,
176 ('w', 'ignore-all-space', None,
177 _('ignore white space when comparing lines')),
177 _('ignore white space when comparing lines')),
178 ('b', 'ignore-space-change', None,
178 ('b', 'ignore-space-change', None,
179 _('ignore changes in the amount of white space')),
179 _('ignore changes in the amount of white space')),
180 ('B', 'ignore-blank-lines', None,
180 ('B', 'ignore-blank-lines', None,
181 _('ignore changes whose lines are all blank')),
181 _('ignore changes whose lines are all blank')),
182 ]
182 ]
183
183
184 diffopts2 = [
184 diffopts2 = [
185 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
185 ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
186 ('p', 'show-function', None, _('show which function each change is in')),
186 ('p', 'show-function', None, _('show which function each change is in')),
187 ('', 'reverse', None, _('produce a diff that undoes the changes')),
187 ('', 'reverse', None, _('produce a diff that undoes the changes')),
188 ] + diffwsopts + [
188 ] + diffwsopts + [
189 ('U', 'unified', '',
189 ('U', 'unified', '',
190 _('number of lines of context to show'), _('NUM')),
190 _('number of lines of context to show'), _('NUM')),
191 ('', 'stat', None, _('output diffstat-style summary of changes')),
191 ('', 'stat', None, _('output diffstat-style summary of changes')),
192 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
192 ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
193 ]
193 ]
194
194
195 mergetoolopts = [
195 mergetoolopts = [
196 ('t', 'tool', '', _('specify merge tool')),
196 ('t', 'tool', '', _('specify merge tool')),
197 ]
197 ]
198
198
199 similarityopts = [
199 similarityopts = [
200 ('s', 'similarity', '',
200 ('s', 'similarity', '',
201 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
201 _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
202 ]
202 ]
203
203
204 subrepoopts = [
204 subrepoopts = [
205 ('S', 'subrepos', None,
205 ('S', 'subrepos', None,
206 _('recurse into subrepositories'))
206 _('recurse into subrepositories'))
207 ]
207 ]
208
208
209 debugrevlogopts = [
209 debugrevlogopts = [
210 ('c', 'changelog', False, _('open changelog')),
210 ('c', 'changelog', False, _('open changelog')),
211 ('m', 'manifest', False, _('open manifest')),
211 ('m', 'manifest', False, _('open manifest')),
212 ('', 'dir', '', _('open directory manifest')),
212 ('', 'dir', '', _('open directory manifest')),
213 ]
213 ]
214
214
215 # Commands start here, listed alphabetically
215 # Commands start here, listed alphabetically
216
216
217 @command('^add',
217 @command('^add',
218 walkopts + subrepoopts + dryrunopts,
218 walkopts + subrepoopts + dryrunopts,
219 _('[OPTION]... [FILE]...'),
219 _('[OPTION]... [FILE]...'),
220 inferrepo=True)
220 inferrepo=True)
221 def add(ui, repo, *pats, **opts):
221 def add(ui, repo, *pats, **opts):
222 """add the specified files on the next commit
222 """add the specified files on the next commit
223
223
224 Schedule files to be version controlled and added to the
224 Schedule files to be version controlled and added to the
225 repository.
225 repository.
226
226
227 The files will be added to the repository at the next commit. To
227 The files will be added to the repository at the next commit. To
228 undo an add before that, see :hg:`forget`.
228 undo an add before that, see :hg:`forget`.
229
229
230 If no names are given, add all files to the repository (except
230 If no names are given, add all files to the repository (except
231 files matching ``.hgignore``).
231 files matching ``.hgignore``).
232
232
233 .. container:: verbose
233 .. container:: verbose
234
234
235 Examples:
235 Examples:
236
236
237 - New (unknown) files are added
237 - New (unknown) files are added
238 automatically by :hg:`add`::
238 automatically by :hg:`add`::
239
239
240 $ ls
240 $ ls
241 foo.c
241 foo.c
242 $ hg status
242 $ hg status
243 ? foo.c
243 ? foo.c
244 $ hg add
244 $ hg add
245 adding foo.c
245 adding foo.c
246 $ hg status
246 $ hg status
247 A foo.c
247 A foo.c
248
248
249 - Specific files to be added can be specified::
249 - Specific files to be added can be specified::
250
250
251 $ ls
251 $ ls
252 bar.c foo.c
252 bar.c foo.c
253 $ hg status
253 $ hg status
254 ? bar.c
254 ? bar.c
255 ? foo.c
255 ? foo.c
256 $ hg add bar.c
256 $ hg add bar.c
257 $ hg status
257 $ hg status
258 A bar.c
258 A bar.c
259 ? foo.c
259 ? foo.c
260
260
261 Returns 0 if all files are successfully added.
261 Returns 0 if all files are successfully added.
262 """
262 """
263
263
264 m = scmutil.match(repo[None], pats, opts)
264 m = scmutil.match(repo[None], pats, opts)
265 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
265 rejected = cmdutil.add(ui, repo, m, "", False, **opts)
266 return rejected and 1 or 0
266 return rejected and 1 or 0
267
267
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)

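# The --similarity handling in addremove() above follows a small, self-contained
# pattern: accept a user-facing 0-100 percentage, validate it, and scale it to
# the 0.0-1.0 fraction that scmutil.addremove() expects. A minimal standalone
# sketch of that pattern is below; the helper name and the plain ValueError are
# illustrative only and are not part of Mercurial's API.
def _similarity_fraction_sketch(value, default=100):
    """Return a 0.0-1.0 fraction for a 0-100 percentage, or raise ValueError.

    >>> _similarity_fraction_sketch('90')
    0.9
    >>> _similarity_fraction_sketch(None)
    1.0
    """
    try:
        sim = float(value if value is not None else default)
    except (TypeError, ValueError):
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0
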
@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    fm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        def hexfn(node):
            if node is None:
                return None
            else:
                return fm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % fm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = fm.hexfunc
        formatrev = formathex = str

    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    if fm.isplain():
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    else:
        def makefunc(get, fmt):
            return get
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        if not lines:
            continue
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if fm.isplain():
                sizes = [encoding.colwidth(x) for x in l]
                ml = max(sizes)
                formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
            else:
                formats.append(['%s' for x in l])
            pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if not lines[-1][1].endswith('\n'):
            fm.plain('\n')

    fm.end()

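# The plain-text path of annotate() above aligns each output column by
# measuring the display width of every cell (encoding.colwidth) and padding
# shorter cells so the widest entry sets the column width. The standalone
# sketch below shows the same alignment idea using plain len() instead of
# colwidth (so it ignores double-width characters); the function name is
# illustrative only.
def _align_columns_sketch(rows, sep=' '):
    """Right-align each column of a list of equal-length string tuples.

    >>> rows = _align_columns_sketch([('7', 'alice'), ('1234', 'bob')])
    >>> rows[0].endswith(' alice') and len(rows[0]) == len(rows[1])
    True
    """
    widths = [max(len(cell) for cell in col) for col in zip(*rows)]
    return [sep.join(cell.rjust(width) for cell, width in zip(row, widths))
            for row in rows]
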
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))

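# "The archive type is automatically detected based on file extension" above
# is delegated to archival.guesskind(dest). The standalone sketch below shows
# one plausible way such a lookup can work; the extension table here is
# illustrative and is not a copy of archival.guesskind's actual table.
def _guess_archive_kind_sketch(dest):
    """Guess an archive kind from a destination filename, or return None.

    >>> _guess_archive_kind_sketch('project-1.0.tar.gz')
    'tgz'
    >>> _guess_archive_kind_sketch('project-1.0') is None
    True
    """
    exts = {
        '.zip': 'zip',
        '.tar': 'tar',
        '.tgz': 'tgz',
        '.tar.gz': 'tgz',
        '.tbz2': 'tbz2',
        '.tar.bz2': 'tbz2',
    }
    lowered = dest.lower()
    # check the longest extensions first so '.tar.gz' beats '.gz'-style suffixes
    for ext in sorted(exts, key=len, reverse=True):
        if lowered.endswith(ext):
            return exts[ext]
    return None
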
@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered,
    it will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(lock, wlock)

def _dobackout(ui, repo, node=None, rev=None, **opts):
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                              (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        dsguard = dirstateguard.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform, **opts)
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n')
                  % nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                         'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0

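# When the changeset being backed out is itself a merge, _dobackout() above
# requires an explicit --parent and only accepts one of the merge's own two
# parents. A standalone sketch of that selection rule follows; the helper name,
# the plain ValueError, and the 'null' sentinel are illustrative, not
# Mercurial API.
def _choose_backout_parent_sketch(p1, p2, requested, nullid='null'):
    """Pick the parent to back out against, mirroring the rule above.

    >>> _choose_backout_parent_sketch('p1', 'null', None)
    'p1'
    >>> _choose_backout_parent_sketch('p1', 'p2', 'p2')
    'p2'
    """
    if p2 != nullid:
        if requested is None:
            raise ValueError('cannot backout a merge changeset without --parent')
        if requested not in (p1, p2):
            raise ValueError('%s is not a parent of the backed-out changeset'
                             % requested)
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1
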
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise error.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        hbisect.resetstate(repo)
        return

    state = hbisect.load_state(repo)

    # update state
    if good or bad or skip:
        if rev:
            nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
        else:
            nodes = [repo.lookup('.')]
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)
        if not (state['good'] and state['bad']):
            return

    def mayupdate(repo, node, show_stats=True):
        """commonly used update sequence"""
        if noupdate:
            return
        cmdutil.bailifchanged(repo)
        return hg.clean(repo, node, show_stats=show_stats)

    displayer = cmdutil.show_changeset(ui, repo, {})

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                    'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        if rev:
            node = repo[scmutil.revsingle(repo, rev, node)].node()
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)})
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                state[transition].append(node)
                ctx = repo[node]
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                hbisect.checkstate(state)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                mayupdate(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        hbisect.printresult(ui, repo, state, displayer, nodes, bgood)
        return

    hbisect.checkstate(state)

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = hbisect.extendrange(repo, state, nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                return mayupdate(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        hbisect.printresult(ui, repo, state, displayer, nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        return mayupdate(repo, node)

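# Two small pieces of bisect's machinery above are easy to lift out and look at
# in isolation: the exit-status protocol for --command (0 = good, 125 = skip,
# 127 or a negative status aborts, anything else = bad) and the "~N tests"
# estimate, which counts how many times the candidate set can be halved. A
# standalone sketch of both follows; the helper names and RuntimeError are
# illustrative, not Mercurial API.
def _bisect_transition_sketch(status):
    """Map a test command's exit status to a bisect transition.

    >>> _bisect_transition_sketch(0), _bisect_transition_sketch(125)
    ('good', 'skip')
    >>> _bisect_transition_sketch(1)
    'bad'
    """
    if status == 125:
        return 'skip'
    if status == 0:
        return 'good'
    if status == 127 or status < 0:
        raise RuntimeError('test command failed to run or was killed')
    return 'bad'

def _remaining_tests_sketch(changesets):
    """Approximate how many more test runs a bisection of N candidates needs.

    >>> [_remaining_tests_sketch(n) for n in (1, 2, 4, 100)]
    [0, 1, 2, 6]
    """
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests
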
955 @command('bookmarks|bookmark',
955 @command('bookmarks|bookmark',
956 [('f', 'force', False, _('force')),
956 [('f', 'force', False, _('force')),
957 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
957 ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
958 ('d', 'delete', False, _('delete a given bookmark')),
958 ('d', 'delete', False, _('delete a given bookmark')),
959 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
959 ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
960 ('i', 'inactive', False, _('mark a bookmark inactive')),
960 ('i', 'inactive', False, _('mark a bookmark inactive')),
961 ] + formatteropts,
961 ] + formatteropts,
962 _('hg bookmarks [OPTIONS]... [NAME]...'))
962 _('hg bookmarks [OPTIONS]... [NAME]...'))
963 def bookmark(ui, repo, *names, **opts):
963 def bookmark(ui, repo, *names, **opts):
964 '''create a new bookmark or list existing bookmarks
964 '''create a new bookmark or list existing bookmarks
965
965
966 Bookmarks are labels on changesets to help track lines of development.
966 Bookmarks are labels on changesets to help track lines of development.
967 Bookmarks are unversioned and can be moved, renamed and deleted.
967 Bookmarks are unversioned and can be moved, renamed and deleted.
968 Deleting or moving a bookmark has no effect on the associated changesets.
968 Deleting or moving a bookmark has no effect on the associated changesets.
969
969
970 Creating or updating to a bookmark causes it to be marked as 'active'.
970 Creating or updating to a bookmark causes it to be marked as 'active'.
971 The active bookmark is indicated with a '*'.
971 The active bookmark is indicated with a '*'.
972 When a commit is made, the active bookmark will advance to the new commit.
972 When a commit is made, the active bookmark will advance to the new commit.
973 A plain :hg:`update` will also advance an active bookmark, if possible.
973 A plain :hg:`update` will also advance an active bookmark, if possible.
974 Updating away from a bookmark will cause it to be deactivated.
974 Updating away from a bookmark will cause it to be deactivated.
975
975
976 Bookmarks can be pushed and pulled between repositories (see
976 Bookmarks can be pushed and pulled between repositories (see
977 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
977 :hg:`help push` and :hg:`help pull`). If a shared bookmark has
978 diverged, a new 'divergent bookmark' of the form 'name@path' will
978 diverged, a new 'divergent bookmark' of the form 'name@path' will
979 be created. Using :hg:`merge` will resolve the divergence.
979 be created. Using :hg:`merge` will resolve the divergence.
980
980
981 A bookmark named '@' has the special property that :hg:`clone` will
981 A bookmark named '@' has the special property that :hg:`clone` will
982 check it out by default if it exists.
982 check it out by default if it exists.
983
983
984 .. container:: verbose
984 .. container:: verbose
985
985
986 Examples:
986 Examples:
987
987
988 - create an active bookmark for a new line of development::
988 - create an active bookmark for a new line of development::
989
989
990 hg book new-feature
990 hg book new-feature
991
991
992 - create an inactive bookmark as a place marker::
992 - create an inactive bookmark as a place marker::
993
993
994 hg book -i reviewed
994 hg book -i reviewed
995
995
996 - create an inactive bookmark on another changeset::
996 - create an inactive bookmark on another changeset::
997
997
998 hg book -r .^ tested
998 hg book -r .^ tested
999
999
1000 - rename bookmark turkey to dinner::
1000 - rename bookmark turkey to dinner::
1001
1001
1002 hg book -m turkey dinner
1002 hg book -m turkey dinner
1003
1003
1004 - move the '@' bookmark from another branch::
1004 - move the '@' bookmark from another branch::
1005
1005
1006 hg book -f @
1006 hg book -f @
1007 '''
1007 '''
1008 force = opts.get('force')
1008 force = opts.get('force')
1009 rev = opts.get('rev')
1009 rev = opts.get('rev')
1010 delete = opts.get('delete')
1010 delete = opts.get('delete')
1011 rename = opts.get('rename')
1011 rename = opts.get('rename')
1012 inactive = opts.get('inactive')
1012 inactive = opts.get('inactive')
1013
1013
1014 def checkformat(mark):
1014 def checkformat(mark):
1015 mark = mark.strip()
1015 mark = mark.strip()
1016 if not mark:
1016 if not mark:
1017 raise error.Abort(_("bookmark names cannot consist entirely of "
1017 raise error.Abort(_("bookmark names cannot consist entirely of "
1018 "whitespace"))
1018 "whitespace"))
1019 scmutil.checknewlabel(repo, mark, 'bookmark')
1019 scmutil.checknewlabel(repo, mark, 'bookmark')
1020 return mark
1020 return mark
1021
1021
1022 def checkconflict(repo, mark, cur, force=False, target=None):
1022 def checkconflict(repo, mark, cur, force=False, target=None):
1023 if mark in marks and not force:
1023 if mark in marks and not force:
1024 if target:
1024 if target:
1025 if marks[mark] == target and target == cur:
1025 if marks[mark] == target and target == cur:
1026 # re-activating a bookmark
1026 # re-activating a bookmark
1027 return
1027 return
1028 anc = repo.changelog.ancestors([repo[target].rev()])
1028 anc = repo.changelog.ancestors([repo[target].rev()])
1029 bmctx = repo[marks[mark]]
1029 bmctx = repo[marks[mark]]
1030 divs = [repo[b].node() for b in marks
1030 divs = [repo[b].node() for b in marks
1031 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1031 if b.split('@', 1)[0] == mark.split('@', 1)[0]]
1032
1032
1033 # allow resolving a single divergent bookmark even if moving
1033 # allow resolving a single divergent bookmark even if moving
1034 # the bookmark across branches when a revision is specified
1034 # the bookmark across branches when a revision is specified
1035 # that contains a divergent bookmark
1035 # that contains a divergent bookmark
1036 if bmctx.rev() not in anc and target in divs:
1036 if bmctx.rev() not in anc and target in divs:
1037 bookmarks.deletedivergent(repo, [target], mark)
1037 bookmarks.deletedivergent(repo, [target], mark)
1038 return
1038 return
1039
1039
1040 deletefrom = [b for b in divs
1040 deletefrom = [b for b in divs
1041 if repo[b].rev() in anc or b == target]
1041 if repo[b].rev() in anc or b == target]
1042 bookmarks.deletedivergent(repo, deletefrom, mark)
1042 bookmarks.deletedivergent(repo, deletefrom, mark)
1043 if bookmarks.validdest(repo, bmctx, repo[target]):
1043 if bookmarks.validdest(repo, bmctx, repo[target]):
1044 ui.status(_("moving bookmark '%s' forward from %s\n") %
1044 ui.status(_("moving bookmark '%s' forward from %s\n") %
1045 (mark, short(bmctx.node())))
1045 (mark, short(bmctx.node())))
1046 return
1046 return
1047 raise error.Abort(_("bookmark '%s' already exists "
1047 raise error.Abort(_("bookmark '%s' already exists "
1048 "(use -f to force)") % mark)
1048 "(use -f to force)") % mark)
1049 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1049 if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
1050 and not force):
1050 and not force):
1051 raise error.Abort(
1051 raise error.Abort(
1052 _("a bookmark cannot have the name of an existing branch"))
1052 _("a bookmark cannot have the name of an existing branch"))
1053
1053
1054 if delete and rename:
1054 if delete and rename:
1055 raise error.Abort(_("--delete and --rename are incompatible"))
1055 raise error.Abort(_("--delete and --rename are incompatible"))
1056 if delete and rev:
1056 if delete and rev:
1057 raise error.Abort(_("--rev is incompatible with --delete"))
1057 raise error.Abort(_("--rev is incompatible with --delete"))
1058 if rename and rev:
1058 if rename and rev:
1059 raise error.Abort(_("--rev is incompatible with --rename"))
1059 raise error.Abort(_("--rev is incompatible with --rename"))
1060 if not names and (delete or rev):
1060 if not names and (delete or rev):
1061 raise error.Abort(_("bookmark name required"))
1061 raise error.Abort(_("bookmark name required"))
1062
1062
1063 if delete or rename or names or inactive:
1063 if delete or rename or names or inactive:
1064 wlock = lock = tr = None
1064 wlock = lock = tr = None
1065 try:
1065 try:
1066 wlock = repo.wlock()
1066 wlock = repo.wlock()
1067 lock = repo.lock()
1067 lock = repo.lock()
1068 cur = repo.changectx('.').node()
1068 cur = repo.changectx('.').node()
1069 marks = repo._bookmarks
1069 marks = repo._bookmarks
1070 if delete:
1070 if delete:
1071 tr = repo.transaction('bookmark')
1071 tr = repo.transaction('bookmark')
1072 for mark in names:
1072 for mark in names:
1073 if mark not in marks:
1073 if mark not in marks:
1074 raise error.Abort(_("bookmark '%s' does not exist") %
1074 raise error.Abort(_("bookmark '%s' does not exist") %
1075 mark)
1075 mark)
1076 if mark == repo._activebookmark:
1076 if mark == repo._activebookmark:
1077 bookmarks.deactivate(repo)
1077 bookmarks.deactivate(repo)
1078 del marks[mark]
1078 del marks[mark]
1079
1079
1080 elif rename:
1080 elif rename:
1081 tr = repo.transaction('bookmark')
1081 tr = repo.transaction('bookmark')
1082 if not names:
1082 if not names:
1083 raise error.Abort(_("new bookmark name required"))
1083 raise error.Abort(_("new bookmark name required"))
1084 elif len(names) > 1:
1084 elif len(names) > 1:
1085 raise error.Abort(_("only one new bookmark name allowed"))
1085 raise error.Abort(_("only one new bookmark name allowed"))
1086 mark = checkformat(names[0])
1087 if rename not in marks:
1088 raise error.Abort(_("bookmark '%s' does not exist")
1089 % rename)
1090 checkconflict(repo, mark, cur, force)
1091 marks[mark] = marks[rename]
1092 if repo._activebookmark == rename and not inactive:
1093 bookmarks.activate(repo, mark)
1094 del marks[rename]
1095 elif names:
1096 tr = repo.transaction('bookmark')
1097 newact = None
1098 for mark in names:
1099 mark = checkformat(mark)
1100 if newact is None:
1101 newact = mark
1102 if inactive and mark == repo._activebookmark:
1103 bookmarks.deactivate(repo)
1104 return
1105 tgt = cur
1106 if rev:
1107 tgt = scmutil.revsingle(repo, rev).node()
1108 checkconflict(repo, mark, cur, force, tgt)
1109 marks[mark] = tgt
1110 if not inactive and cur == marks[newact] and not rev:
1111 bookmarks.activate(repo, newact)
1112 elif cur != tgt and newact == repo._activebookmark:
1113 bookmarks.deactivate(repo)
1114 elif inactive:
1115 if len(marks) == 0:
1116 ui.status(_("no bookmarks set\n"))
1117 elif not repo._activebookmark:
1118 ui.status(_("no active bookmark\n"))
1119 else:
1120 bookmarks.deactivate(repo)
1121 if tr is not None:
1122 marks.recordchange(tr)
1123 tr.close()
1124 finally:
1125 lockmod.release(tr, lock, wlock)
1126 else: # show bookmarks
1127 fm = ui.formatter('bookmarks', opts)
1128 hexfn = fm.hexfunc
1129 marks = repo._bookmarks
1130 if len(marks) == 0 and fm.isplain():
1131 ui.status(_("no bookmarks set\n"))
1132 for bmark, n in sorted(marks.iteritems()):
1133 active = repo._activebookmark
1134 if bmark == active:
1135 prefix, label = '*', activebookmarklabel
1136 else:
1137 prefix, label = ' ', ''
1138
1139 fm.startitem()
1140 if not ui.quiet:
1141 fm.plain(' %s ' % prefix, label=label)
1142 fm.write('bookmark', '%s', bmark, label=label)
1143 pad = " " * (25 - encoding.colwidth(bmark))
1144 fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
1145 repo.changelog.rev(n), hexfn(n), label=label)
1146 fm.data(active=(bmark == active))
1147 fm.plain('\n')
1148 fm.end()
1149
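The bookmark listing above pads each name to a fixed column before the rev:node pair is written. A minimal, self-contained sketch of that alignment, using plain len() in place of encoding.colwidth (which the real code uses to handle wide characters) and a hypothetical helper name::

    def format_bookmark_line(name, rev, node, width=25):
        # hypothetical helper: pad the name so rev:node starts in a fixed column
        pad = " " * max(width - len(name), 0)
        return " * %s%s %d:%s" % (name, pad, rev, node)

    print(format_bookmark_line("feature-x", 42, "9f3b7a1c2d4e"))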
1150 @command('branch',
1151 [('f', 'force', None,
1152 _('set branch name even if it shadows an existing branch')),
1153 ('C', 'clean', None, _('reset branch name to parent branch name'))],
1154 _('[-fC] [NAME]'))
1155 def branch(ui, repo, label=None, **opts):
1156 """set or show the current branch name
1157
1158 .. note::
1159
1160 Branch names are permanent and global. Use :hg:`bookmark` to create a
1161 light-weight bookmark instead. See :hg:`help glossary` for more
1162 information about named branches and bookmarks.
1163
1164 With no argument, show the current branch name. With one argument,
1165 set the working directory branch name (the branch will not exist
1166 in the repository until the next commit). Standard practice
1167 recommends that primary development take place on the 'default'
1168 branch.
1169
1170 Unless -f/--force is specified, branch will not let you set a
1171 branch name that already exists.
1172
1173 Use -C/--clean to reset the working directory branch to that of
1174 the parent of the working directory, negating a previous branch
1175 change.
1176
1177 Use the command :hg:`update` to switch to an existing branch. Use
1178 :hg:`commit --close-branch` to mark this branch head as closed.
1179 When all heads of a branch are closed, the branch will be
1180 considered closed.
1181
1182 Returns 0 on success.
1183 """
1184 if label:
1185 label = label.strip()
1186
1187 if not opts.get('clean') and not label:
1188 ui.write("%s\n" % repo.dirstate.branch())
1189 return
1190
1191 with repo.wlock():
1192 if opts.get('clean'):
1193 label = repo[None].p1().branch()
1194 repo.dirstate.setbranch(label)
1195 ui.status(_('reset working directory to branch %s\n') % label)
1196 elif label:
1197 if not opts.get('force') and label in repo.branchmap():
1198 if label not in [p.branch() for p in repo[None].parents()]:
1199 raise error.Abort(_('a branch of the same name already'
1200 ' exists'),
1201 # i18n: "it" refers to an existing branch
1202 hint=_("use 'hg update' to switch to it"))
1203 scmutil.checknewlabel(repo, label, 'branch')
1204 repo.dirstate.setbranch(label)
1205 ui.status(_('marked working directory as branch %s\n') % label)
1206
1207 # find any open named branches aside from default
1208 others = [n for n, h, t, c in repo.branchmap().iterbranches()
1209 if n != "default" and not c]
1210 if not others:
1211 ui.status(_('(branches are permanent and global, '
1212 'did you want a bookmark?)\n'))
1213
1214 @command('branches',
1215 [('a', 'active', False,
1216 _('show only branches that have unmerged heads (DEPRECATED)')),
1217 ('c', 'closed', False, _('show normal and closed branches')),
1218 ] + formatteropts,
1219 _('[-c]'))
1220 def branches(ui, repo, active=False, closed=False, **opts):
1221 """list repository named branches
1222
1223 List the repository's named branches, indicating which ones are
1224 inactive. If -c/--closed is specified, also list branches which have
1225 been marked closed (see :hg:`commit --close-branch`).
1226
1227 Use the command :hg:`update` to switch to an existing branch.
1228
1229 Returns 0.
1230 """
1231
1232 fm = ui.formatter('branches', opts)
1233 hexfunc = fm.hexfunc
1234
1235 allheads = set(repo.heads())
1236 branches = []
1237 for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
1238 isactive = not isclosed and bool(set(heads) & allheads)
1239 branches.append((tag, repo[tip], isactive, not isclosed))
1240 branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]),
1241 reverse=True)
1242
1243 for tag, ctx, isactive, isopen in branches:
1244 if active and not isactive:
1245 continue
1246 if isactive:
1247 label = 'branches.active'
1248 notice = ''
1249 elif not isopen:
1250 if not closed:
1251 continue
1252 label = 'branches.closed'
1253 notice = _(' (closed)')
1254 else:
1255 label = 'branches.inactive'
1256 notice = _(' (inactive)')
1257 current = (tag == repo.dirstate.branch())
1258 if current:
1259 label = 'branches.current'
1260
1261 fm.startitem()
1262 fm.write('branch', '%s', tag, label=label)
1263 rev = ctx.rev()
1264 padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0)
1265 fmt = ' ' * padsize + ' %d:%s'
1266 fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()),
1267 label='log.changeset changeset.%s' % ctx.phasestr())
1268 fm.data(active=isactive, closed=not isopen, current=current)
1269 if not ui.quiet:
1270 fm.plain(notice)
1271 fm.plain('\n')
1272 fm.end()
1273
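The sort in branches() orders entries by the tuple (isactive, rev, name, isopen) with reverse=True, so active branches come first and higher revisions sort earlier within each group. The same idea in isolation, with made-up tuples standing in for the real (tag, ctx, isactive, isopen) entries::

    entries = [('default', 10, True), ('stable', 12, True), ('old-work', 3, False)]
    # sort by (isactive, rev, name) descending, mirroring the key used above
    entries.sort(key=lambda e: (e[2], e[1], e[0]), reverse=True)
    print(entries)  # [('stable', 12, True), ('default', 10, True), ('old-work', 3, False)]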
1274 @command('bundle',
1275 [('f', 'force', None, _('run even when the destination is unrelated')),
1276 ('r', 'rev', [], _('a changeset intended to be added to the destination'),
1277 _('REV')),
1278 ('b', 'branch', [], _('a specific branch you would like to bundle'),
1279 _('BRANCH')),
1280 ('', 'base', [],
1281 _('a base changeset assumed to be available at the destination'),
1282 _('REV')),
1283 ('a', 'all', None, _('bundle all changesets in the repository')),
1284 ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')),
1285 ] + remoteopts,
1286 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]'))
1287 def bundle(ui, repo, fname, dest=None, **opts):
1288 """create a changegroup file
1289
1290 Generate a changegroup file collecting changesets to be added
1291 to a repository.
1292
1293 To create a bundle containing all changesets, use -a/--all
1294 (or --base null). Otherwise, hg assumes the destination will have
1295 all the nodes you specify with --base parameters. Otherwise, hg
1296 will assume the repository has all the nodes in destination, or
1297 default-push/default if no destination is specified.
1298
1299 You can change bundle format with the -t/--type option. You can
1300 specify a compression, a bundle version or both using a dash
1301 (comp-version). The available compression methods are: none, bzip2,
1302 and gzip (by default, bundles are compressed using bzip2). The
1303 available formats are: v1, v2 (default to most suitable).
1304
1305 The bundle file can then be transferred using conventional means
1306 and applied to another repository with the unbundle or pull
1307 command. This is useful when direct push and pull are not
1308 available or when exporting an entire repository is undesirable.
1309
1310 Applying bundles preserves all changeset contents including
1311 permissions, copy/rename information, and revision history.
1312
1313 Returns 0 on success, 1 if no changes found.
1314 """
1315 revs = None
1316 if 'rev' in opts:
1317 revstrings = opts['rev']
1318 revs = scmutil.revrange(repo, revstrings)
1319 if revstrings and not revs:
1320 raise error.Abort(_('no commits to bundle'))
1321
1322 bundletype = opts.get('type', 'bzip2').lower()
1323 try:
1324 bcompression, cgversion, params = exchange.parsebundlespec(
1325 repo, bundletype, strict=False)
1326 except error.UnsupportedBundleSpecification as e:
1327 raise error.Abort(str(e),
1328 hint=_("see 'hg help bundle' for supported "
1329 "values for --type"))
1330
1331 # Packed bundles are a pseudo bundle format for now.
1332 if cgversion == 's1':
1333 raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'),
1334 hint=_("use 'hg debugcreatestreamclonebundle'"))
1335
1336 if opts.get('all'):
1337 if dest:
1338 raise error.Abort(_("--all is incompatible with specifying "
1339 "a destination"))
1340 if opts.get('base'):
1341 ui.warn(_("ignoring --base because --all was specified\n"))
1342 base = ['null']
1343 else:
1344 base = scmutil.revrange(repo, opts.get('base'))
1345 # TODO: get desired bundlecaps from command line.
1346 bundlecaps = None
1347 if cgversion not in changegroup.supportedoutgoingversions(repo):
1348 raise error.Abort(_("repository does not support bundle version %s") %
1349 cgversion)
1350
1351 if base:
1352 if dest:
1353 raise error.Abort(_("--base is incompatible with specifying "
1354 "a destination"))
1355 common = [repo.lookup(rev) for rev in base]
1356 heads = revs and map(repo.lookup, revs) or None
1357 outgoing = discovery.outgoing(repo, common, heads)
1358 cg = changegroup.getchangegroup(repo, 'bundle', outgoing,
1359 bundlecaps=bundlecaps,
1360 version=cgversion)
1361 outgoing = None
1362 else:
1363 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1364 dest, branches = hg.parseurl(dest, opts.get('branch'))
1365 other = hg.peer(repo, opts, dest)
1366 revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
1367 heads = revs and map(repo.lookup, revs) or revs
1368 outgoing = discovery.findcommonoutgoing(repo, other,
1369 onlyheads=heads,
1370 force=opts.get('force'),
1371 portable=True)
1372 cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing,
1373 bundlecaps, version=cgversion)
1374 if not cg:
1375 scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
1376 return 1
1377
1378 if cgversion == '01': #bundle1
1379 if bcompression is None:
1380 bcompression = 'UN'
1381 bversion = 'HG10' + bcompression
1382 bcompression = None
1383 else:
1384 assert cgversion == '02'
1385 bversion = 'HG20'
1386
1387 bundle2.writebundle(ui, cg, fname, bversion, compression=bcompression)
1388
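The -t/--type value combines compression and version with a dash ('comp-version'), as the docstring above describes. A rough, hypothetical parser for just that surface syntax (the real validation happens in exchange.parsebundlespec and is stricter)::

    def parse_bundle_type(spec):
        # hypothetical sketch: split 'gzip-v2' into ('gzip', 'v2'); either part may be omitted
        comp, version = None, None
        for part in spec.lower().split('-'):
            if part in ('none', 'bzip2', 'gzip'):
                comp = part
            elif part in ('v1', 'v2'):
                version = part
            else:
                raise ValueError('unknown bundle type component: %s' % part)
        return comp, version

    print(parse_bundle_type('gzip-v2'))  # ('gzip', 'v2')
    print(parse_bundle_type('bzip2'))    # ('bzip2', None)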
1389 @command('cat',
1390 [('o', 'output', '',
1391 _('print output to file with formatted name'), _('FORMAT')),
1392 ('r', 'rev', '', _('print the given revision'), _('REV')),
1393 ('', 'decode', None, _('apply any matching decode filter')),
1394 ] + walkopts,
1395 _('[OPTION]... FILE...'),
1396 inferrepo=True)
1397 def cat(ui, repo, file1, *pats, **opts):
1398 """output the current or given revision of files
1399
1400 Print the specified files as they were at the given revision. If
1401 no revision is given, the parent of the working directory is used.
1402
1403 Output may be to a file, in which case the name of the file is
1404 given using a format string. The formatting rules are as follows:
1405
1406 :``%%``: literal "%" character
1407 :``%s``: basename of file being printed
1408 :``%d``: dirname of file being printed, or '.' if in repository root
1409 :``%p``: root-relative path name of file being printed
1410 :``%H``: changeset hash (40 hexadecimal digits)
1411 :``%R``: changeset revision number
1412 :``%h``: short-form changeset hash (12 hexadecimal digits)
1413 :``%r``: zero-padded changeset revision number
1414 :``%b``: basename of the exporting repository
1415
1416 Returns 0 on success.
1417 """
1418 ctx = scmutil.revsingle(repo, opts.get('rev'))
1419 m = scmutil.match(ctx, (file1,) + pats, opts)
1420
1421 return cmdutil.cat(ui, repo, ctx, m, '', **opts)
1422
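The %-keys documented in the cat docstring are simple substitutions into the -o/--output name. A toy expansion of a few of them (a hypothetical helper, not the one cmdutil actually uses)::

    import os

    def expand_output_name(fmt, path, rev, short_hash):
        # hypothetical: expand a subset of the documented 'hg cat -o FORMAT' keys
        subs = {
            '%s': os.path.basename(path),
            '%d': os.path.dirname(path) or '.',
            '%p': path,
            '%R': str(rev),
            '%h': short_hash,
        }
        for key, value in subs.items():
            fmt = fmt.replace(key, value)
        return fmt

    print(expand_output_name('%d/%s@%R.txt', 'lib/util.py', 42, '9f3b7a1c2d4e'))
    # lib/util.py@42.txt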
1423 @command('^clone',
1424 [('U', 'noupdate', None, _('the clone will include an empty working '
1425 'directory (only a repository)')),
1426 ('u', 'updaterev', '', _('revision, tag, or branch to check out'),
1427 _('REV')),
1428 ('r', 'rev', [], _('include the specified changeset'), _('REV')),
1429 ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')),
1430 ('', 'pull', None, _('use pull protocol to copy metadata')),
1431 ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')),
1432 ] + remoteopts,
1433 _('[OPTION]... SOURCE [DEST]'),
1434 norepo=True)
1435 def clone(ui, source, dest=None, **opts):
1436 """make a copy of an existing repository
1437
1438 Create a copy of an existing repository in a new directory.
1439
1440 If no destination directory name is specified, it defaults to the
1441 basename of the source.
1442
1443 The location of the source is added to the new repository's
1444 ``.hg/hgrc`` file, as the default to be used for future pulls.
1445
1446 Only local paths and ``ssh://`` URLs are supported as
1447 destinations. For ``ssh://`` destinations, no working directory or
1448 ``.hg/hgrc`` will be created on the remote side.
1449
1450 If the source repository has a bookmark called '@' set, that
1451 revision will be checked out in the new repository by default.
1452
1453 To check out a particular version, use -u/--update, or
1454 -U/--noupdate to create a clone with no working directory.
1455
1456 To pull only a subset of changesets, specify one or more revision
1457 identifiers with -r/--rev or branches with -b/--branch. The
1458 resulting clone will contain only the specified changesets and
1459 their ancestors. These options (or 'clone src#rev dest') imply
1460 --pull, even for local source repositories.
1461
1462 .. note::
1463
1464 Specifying a tag will include the tagged changeset but not the
1465 changeset containing the tag.
1466
1467 .. container:: verbose
1468
1469 For efficiency, hardlinks are used for cloning whenever the
1470 source and destination are on the same filesystem (note this
1471 applies only to the repository data, not to the working
1472 directory). Some filesystems, such as AFS, implement hardlinking
1473 incorrectly, but do not report errors. In these cases, use the
1474 --pull option to avoid hardlinking.
1475
1476 In some cases, you can clone repositories and the working
1477 directory using full hardlinks with ::
1478
1479 $ cp -al REPO REPOCLONE
1480
1481 This is the fastest way to clone, but it is not always safe. The
1482 operation is not atomic (making sure REPO is not modified during
1483 the operation is up to you) and you have to make sure your
1484 editor breaks hardlinks (Emacs and most Linux Kernel tools do
1485 so). Also, this is not compatible with certain extensions that
1486 place their metadata under the .hg directory, such as mq.
1487
1488 Mercurial will update the working directory to the first applicable
1489 revision from this list:
1490
1491 a) null if -U or the source repository has no changesets
1492 b) if -u . and the source repository is local, the first parent of
1493 the source repository's working directory
1494 c) the changeset specified with -u (if a branch name, this means the
1495 latest head of that branch)
1496 d) the changeset specified with -r
1497 e) the tipmost head specified with -b
1498 f) the tipmost head specified with the url#branch source syntax
1499 g) the revision marked with the '@' bookmark, if present
1500 h) the tipmost head of the default branch
1501 i) tip
1502
1503 When cloning from servers that support it, Mercurial may fetch
1504 pre-generated data from a server-advertised URL. When this is done,
1505 hooks operating on incoming changesets and changegroups may fire twice,
1506 once for the bundle fetched from the URL and another for any additional
1507 data not fetched from this URL. In addition, if an error occurs, the
1508 repository may be rolled back to a partial clone. This behavior may
1509 change in future releases. See :hg:`help -e clonebundles` for more.
1510
1511 Examples:
1512
1513 - clone a remote repository to a new directory named hg/::
1514
1515 hg clone https://www.mercurial-scm.org/repo/hg/
1516
1517 - create a lightweight local clone::
1518
1519 hg clone project/ project-feature/
1520
1521 - clone from an absolute path on an ssh server (note double-slash)::
1522
1523 hg clone ssh://user@server//home/projects/alpha/
1524
1525 - do a high-speed clone over a LAN while checking out a
1526 specified version::
1527
1528 hg clone --uncompressed http://server/repo -u 1.5
1529
1530 - create a repository without changesets after a particular revision::
1531
1532 hg clone -r 04e544 experimental/ good/
1533
1534 - clone (and track) a particular named branch::
1535
1536 hg clone https://www.mercurial-scm.org/repo/hg/#stable
1537
1538 See :hg:`help urls` for details on specifying URLs.
1539
1540 Returns 0 on success.
1541 """
1542 if opts.get('noupdate') and opts.get('updaterev'):
1543 raise error.Abort(_("cannot specify both --noupdate and --updaterev"))
1544
1545 r = hg.clone(ui, opts, source, dest,
1546 pull=opts.get('pull'),
1547 stream=opts.get('uncompressed'),
1548 rev=opts.get('rev'),
1549 update=opts.get('updaterev') or not opts.get('noupdate'),
1550 branch=opts.get('branch'),
1551 shareopts=opts.get('shareopts'))
1552
1553 return r is None
1554
1556 [('A', 'addremove', None,
1556 [('A', 'addremove', None,
1557 _('mark new/missing files as added/removed before committing')),
1557 _('mark new/missing files as added/removed before committing')),
1558 ('', 'close-branch', None,
1558 ('', 'close-branch', None,
1559 _('mark a branch head as closed')),
1559 _('mark a branch head as closed')),
1560 ('', 'amend', None, _('amend the parent of the working directory')),
1560 ('', 'amend', None, _('amend the parent of the working directory')),
1561 ('s', 'secret', None, _('use the secret phase for committing')),
1561 ('s', 'secret', None, _('use the secret phase for committing')),
1562 ('e', 'edit', None, _('invoke editor on commit messages')),
1562 ('e', 'edit', None, _('invoke editor on commit messages')),
1563 ('i', 'interactive', None, _('use interactive mode')),
1563 ('i', 'interactive', None, _('use interactive mode')),
1564 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1564 ] + walkopts + commitopts + commitopts2 + subrepoopts,
1565 _('[OPTION]... [FILE]...'),
1565 _('[OPTION]... [FILE]...'),
1566 inferrepo=True)
1566 inferrepo=True)
1567 def commit(ui, repo, *pats, **opts):
1567 def commit(ui, repo, *pats, **opts):
1568 """commit the specified files or all outstanding changes
1568 """commit the specified files or all outstanding changes
1569
1569
1570 Commit changes to the given files into the repository. Unlike a
1570 Commit changes to the given files into the repository. Unlike a
1571 centralized SCM, this operation is a local operation. See
1571 centralized SCM, this operation is a local operation. See
1572 :hg:`push` for a way to actively distribute your changes.
1572 :hg:`push` for a way to actively distribute your changes.
1573
1573
1574 If a list of files is omitted, all changes reported by :hg:`status`
1574 If a list of files is omitted, all changes reported by :hg:`status`
1575 will be committed.
1575 will be committed.
1576
1576
1577 If you are committing the result of a merge, do not provide any
1577 If you are committing the result of a merge, do not provide any
1578 filenames or -I/-X filters.
1578 filenames or -I/-X filters.
1579
1579
1580 If no commit message is specified, Mercurial starts your
1580 If no commit message is specified, Mercurial starts your
1581 configured editor where you can enter a message. In case your
1581 configured editor where you can enter a message. In case your
1582 commit fails, you will find a backup of your message in
1582 commit fails, you will find a backup of your message in
1583 ``.hg/last-message.txt``.
1583 ``.hg/last-message.txt``.
1584
1584
1585 The --close-branch flag can be used to mark the current branch
1585 The --close-branch flag can be used to mark the current branch
1586 head closed. When all heads of a branch are closed, the branch
1586 head closed. When all heads of a branch are closed, the branch
1587 will be considered closed and no longer listed.
1587 will be considered closed and no longer listed.
1588
1588
1589 The --amend flag can be used to amend the parent of the
1589 The --amend flag can be used to amend the parent of the
1590 working directory with a new commit that contains the changes
1590 working directory with a new commit that contains the changes
1591 in the parent in addition to those currently reported by :hg:`status`,
1591 in the parent in addition to those currently reported by :hg:`status`,
1592 if there are any. The old commit is stored in a backup bundle in
1592 if there are any. The old commit is stored in a backup bundle in
1593 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1593 ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
1594 on how to restore it).
1594 on how to restore it).
1595
1595
1596 Message, user and date are taken from the amended commit unless
1596 Message, user and date are taken from the amended commit unless
1597 specified. When a message isn't specified on the command line,
1597 specified. When a message isn't specified on the command line,
1598 the editor will open with the message of the amended commit.
1598 the editor will open with the message of the amended commit.
1599
1599
1600 It is not possible to amend public changesets (see :hg:`help phases`)
1600 It is not possible to amend public changesets (see :hg:`help phases`)
1601 or changesets that have children.
1601 or changesets that have children.
1602
1602
1603 See :hg:`help dates` for a list of formats valid for -d/--date.
1603 See :hg:`help dates` for a list of formats valid for -d/--date.
1604
1604
1605 Returns 0 on success, 1 if nothing changed.
1605 Returns 0 on success, 1 if nothing changed.
1606
1606
1607 .. container:: verbose
1607 .. container:: verbose
1608
1608
1609 Examples:
1609 Examples:
1610
1610
1611 - commit all files ending in .py::
1611 - commit all files ending in .py::
1612
1612
1613 hg commit --include "set:**.py"
1613 hg commit --include "set:**.py"
1614
1614
1615 - commit all non-binary files::
1615 - commit all non-binary files::
1616
1616
1617 hg commit --exclude "set:binary()"
1617 hg commit --exclude "set:binary()"
1618
1618
1619 - amend the current commit and set the date to now::
1619 - amend the current commit and set the date to now::
1620
1620
1621 hg commit --amend --date now
1621 hg commit --amend --date now
1622 """
1622 """
1623 wlock = lock = None
1623 wlock = lock = None
1624 try:
1624 try:
1625 wlock = repo.wlock()
1625 wlock = repo.wlock()
1626 lock = repo.lock()
1626 lock = repo.lock()
1627 return _docommit(ui, repo, *pats, **opts)
1627 return _docommit(ui, repo, *pats, **opts)
1628 finally:
1628 finally:
1629 release(lock, wlock)
1629 release(lock, wlock)
1630
1630
1631 def _docommit(ui, repo, *pats, **opts):
1631 def _docommit(ui, repo, *pats, **opts):
1632 if opts.get('interactive'):
1632 if opts.get('interactive'):
1633 opts.pop('interactive')
1633 opts.pop('interactive')
1634 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1634 ret = cmdutil.dorecord(ui, repo, commit, None, False,
1635 cmdutil.recordfilter, *pats, **opts)
1635 cmdutil.recordfilter, *pats, **opts)
1636 # ret can be 0 (no changes to record) or the value returned by
1636 # ret can be 0 (no changes to record) or the value returned by
1637 # commit(), 1 if nothing changed or None on success.
1637 # commit(), 1 if nothing changed or None on success.
1638 return 1 if ret == 0 else ret
1638 return 1 if ret == 0 else ret
1639
1639
1640 if opts.get('subrepos'):
1640 if opts.get('subrepos'):
1641 if opts.get('amend'):
1641 if opts.get('amend'):
1642 raise error.Abort(_('cannot amend with --subrepos'))
1642 raise error.Abort(_('cannot amend with --subrepos'))
1643 # Let --subrepos on the command line override config setting.
1643 # Let --subrepos on the command line override config setting.
1644 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1644 ui.setconfig('ui', 'commitsubrepos', True, 'commit')
1645
1645
1646 cmdutil.checkunfinished(repo, commit=True)
1646 cmdutil.checkunfinished(repo, commit=True)
1647
1647
1648 branch = repo[None].branch()
1648 branch = repo[None].branch()
1649 bheads = repo.branchheads(branch)
1649 bheads = repo.branchheads(branch)
1650
1650
1651 extra = {}
1651 extra = {}
1652 if opts.get('close_branch'):
1652 if opts.get('close_branch'):
1653 extra['close'] = 1
1653 extra['close'] = 1
1654
1654
1655 if not bheads:
1655 if not bheads:
1656 raise error.Abort(_('can only close branch heads'))
1656 raise error.Abort(_('can only close branch heads'))
1657 elif opts.get('amend'):
1657 elif opts.get('amend'):
1658 if repo[None].parents()[0].p1().branch() != branch and \
1658 if repo[None].parents()[0].p1().branch() != branch and \
1659 repo[None].parents()[0].p2().branch() != branch:
1659 repo[None].parents()[0].p2().branch() != branch:
1660 raise error.Abort(_('can only close branch heads'))
1660 raise error.Abort(_('can only close branch heads'))
1661
1661
1662 if opts.get('amend'):
1662 if opts.get('amend'):
1663 if ui.configbool('ui', 'commitsubrepos'):
1663 if ui.configbool('ui', 'commitsubrepos'):
1664 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1664 raise error.Abort(_('cannot amend with ui.commitsubrepos enabled'))
1665
1665
1666 old = repo['.']
1666 old = repo['.']
1667 if not old.mutable():
1667 if not old.mutable():
1668 raise error.Abort(_('cannot amend public changesets'))
1668 raise error.Abort(_('cannot amend public changesets'))
1669 if len(repo[None].parents()) > 1:
1669 if len(repo[None].parents()) > 1:
1670 raise error.Abort(_('cannot amend while merging'))
1670 raise error.Abort(_('cannot amend while merging'))
1671 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1671 allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
1672 if not allowunstable and old.children():
1672 if not allowunstable and old.children():
1673 raise error.Abort(_('cannot amend changeset with children'))
1673 raise error.Abort(_('cannot amend changeset with children'))
1674
1674
1675 # Currently histedit gets confused if an amend happens while histedit
1675 # Currently histedit gets confused if an amend happens while histedit
1676 # is in progress. Since we have a checkunfinished command, we are
1676 # is in progress. Since we have a checkunfinished command, we are
1677 # temporarily honoring it.
1677 # temporarily honoring it.
1678 #
1678 #
1679 # Note: eventually this guard will be removed. Please do not expect
1679 # Note: eventually this guard will be removed. Please do not expect
1680 # this behavior to remain.
1680 # this behavior to remain.
1681 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1681 if not obsolete.isenabled(repo, obsolete.createmarkersopt):
1682 cmdutil.checkunfinished(repo)
1682 cmdutil.checkunfinished(repo)
1683
1683
1684 # commitfunc is used only for temporary amend commit by cmdutil.amend
1684 # commitfunc is used only for temporary amend commit by cmdutil.amend
1685 def commitfunc(ui, repo, message, match, opts):
1685 def commitfunc(ui, repo, message, match, opts):
1686 return repo.commit(message,
1686 return repo.commit(message,
1687 opts.get('user') or old.user(),
1687 opts.get('user') or old.user(),
1688 opts.get('date') or old.date(),
1688 opts.get('date') or old.date(),
1689 match,
1689 match,
1690 extra=extra)
1690 extra=extra)
1691
1691
1692 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1692 node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
1693 if node == old.node():
1693 if node == old.node():
1694 ui.status(_("nothing changed\n"))
1694 ui.status(_("nothing changed\n"))
1695 return 1
1695 return 1
1696 else:
1696 else:
1697 def commitfunc(ui, repo, message, match, opts):
1697 def commitfunc(ui, repo, message, match, opts):
1698 backup = ui.backupconfig('phases', 'new-commit')
1698 backup = ui.backupconfig('phases', 'new-commit')
1699 baseui = repo.baseui
1699 baseui = repo.baseui
1700 basebackup = baseui.backupconfig('phases', 'new-commit')
1700 basebackup = baseui.backupconfig('phases', 'new-commit')
1701 try:
1701 try:
1702 if opts.get('secret'):
1702 if opts.get('secret'):
1703 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1703 ui.setconfig('phases', 'new-commit', 'secret', 'commit')
1704 # Propagate to subrepos
1704 # Propagate to subrepos
1705 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1705 baseui.setconfig('phases', 'new-commit', 'secret', 'commit')
1706
1706
1707 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1707 editform = cmdutil.mergeeditform(repo[None], 'commit.normal')
1708 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1708 editor = cmdutil.getcommiteditor(editform=editform, **opts)
1709 return repo.commit(message, opts.get('user'), opts.get('date'),
1709 return repo.commit(message, opts.get('user'), opts.get('date'),
1710 match,
1710 match,
1711 editor=editor,
1711 editor=editor,
1712 extra=extra)
1712 extra=extra)
1713 finally:
1713 finally:
1714 ui.restoreconfig(backup)
1714 ui.restoreconfig(backup)
1715 repo.baseui.restoreconfig(basebackup)
1715 repo.baseui.restoreconfig(basebackup)
1716
1716
1717
1717
1718 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1718 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
1719
1719
1720 if not node:
1720 if not node:
1721 stat = cmdutil.postcommitstatus(repo, pats, opts)
1721 stat = cmdutil.postcommitstatus(repo, pats, opts)
1722 if stat[3]:
1722 if stat[3]:
1723 ui.status(_("nothing changed (%d missing files, see "
1723 ui.status(_("nothing changed (%d missing files, see "
1724 "'hg status')\n") % len(stat[3]))
1724 "'hg status')\n") % len(stat[3]))
1725 else:
1725 else:
1726 ui.status(_("nothing changed\n"))
1726 ui.status(_("nothing changed\n"))
1727 return 1
1727 return 1
1728
1728
1729 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1729 cmdutil.commitstatus(repo, node, branch, bheads, opts)
1730
1730
1731 @command('config|showconfig|debugconfig',
1732 [('u', 'untrusted', None, _('show untrusted configuration options')),
1733 ('e', 'edit', None, _('edit user config')),
1734 ('l', 'local', None, _('edit repository config')),
1735 ('g', 'global', None, _('edit global config'))] + formatteropts,
1736 _('[-u] [NAME]...'),
1737 optionalrepo=True)
1738 def config(ui, repo, *values, **opts):
1739 """show combined config settings from all hgrc files
1740
1741 With no arguments, print names and values of all config items.
1742
1743 With one argument of the form section.name, print just the value
1744 of that config item.
1745
1746 With multiple arguments, print names and values of all config
1747 items with matching section names.
1748
1749 With --edit, start an editor on the user-level config file. With
1750 --global, edit the system-wide config file. With --local, edit the
1751 repository-level config file.
1752
1753 With --debug, the source (filename and line number) is printed
1754 for each config item.
1755
1756 See :hg:`help config` for more information about config files.
1757
1758 Returns 0 on success, 1 if NAME does not exist.
1759
1760 """
1761
1762 if opts.get('edit') or opts.get('local') or opts.get('global'):
1763 if opts.get('local') and opts.get('global'):
1764 raise error.Abort(_("can't use --local and --global together"))
1765
1766 if opts.get('local'):
1767 if not repo:
1768 raise error.Abort(_("can't use --local outside a repository"))
1769 paths = [repo.join('hgrc')]
1770 elif opts.get('global'):
1771 paths = scmutil.systemrcpath()
1772 else:
1773 paths = scmutil.userrcpath()
1774
1775 for f in paths:
1776 if os.path.exists(f):
1777 break
1778 else:
1779 if opts.get('global'):
1780 samplehgrc = uimod.samplehgrcs['global']
1781 elif opts.get('local'):
1782 samplehgrc = uimod.samplehgrcs['local']
1783 else:
1784 samplehgrc = uimod.samplehgrcs['user']
1785
1786 f = paths[0]
1787 fp = open(f, "w")
1788 fp.write(samplehgrc)
1789 fp.close()
1790
1791 editor = ui.geteditor()
1792 ui.system("%s \"%s\"" % (editor, f),
1793 onerr=error.Abort, errprefix=_("edit failed"))
1794 return
1795
1796 fm = ui.formatter('config', opts)
1797 for f in scmutil.rcpath():
1798 ui.debug('read config from: %s\n' % f)
1799 untrusted = bool(opts.get('untrusted'))
1800 if values:
1801 sections = [v for v in values if '.' not in v]
1802 items = [v for v in values if '.' in v]
1803 if len(items) > 1 or items and sections:
1804 raise error.Abort(_('only one config item permitted'))
1805 matched = False
1806 for section, name, value in ui.walkconfig(untrusted=untrusted):
1807 source = ui.configsource(section, name, untrusted)
1808 value = str(value)
1809 if fm.isplain():
1810 source = source or 'none'
1811 value = value.replace('\n', '\\n')
1812 entryname = section + '.' + name
1813 if values:
1814 for v in values:
1815 if v == section:
1816 fm.startitem()
1817 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1818 fm.write('name value', '%s=%s\n', entryname, value)
1819 matched = True
1820 elif v == entryname:
1821 fm.startitem()
1822 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1823 fm.write('value', '%s\n', value)
1824 fm.data(name=entryname)
1825 matched = True
1826 else:
1827 fm.startitem()
1828 fm.condwrite(ui.debugflag, 'source', '%s: ', source)
1829 fm.write('name value', '%s=%s\n', entryname, value)
1830 matched = True
1831 fm.end()
1832 if matched:
1833 return 0
1834 return 1
1835
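The argument handling above separates bare section names from section.name items by the presence of a dot, and rejects more than one item or a mix of the two. The same filtering in isolation (illustration only, with a hypothetical helper name)::

    def split_config_args(values):
        # mirrors the sections/items split in config() above
        sections = [v for v in values if '.' not in v]
        items = [v for v in values if '.' in v]
        if len(items) > 1 or (items and sections):
            raise ValueError('only one config item permitted')
        return sections, items

    print(split_config_args(['ui.username']))  # ([], ['ui.username'])
    print(split_config_args(['ui', 'paths']))  # (['ui', 'paths'], [])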
1836 @command('copy|cp',
1837 [('A', 'after', None, _('record a copy that has already occurred')),
1838 ('f', 'force', None, _('forcibly copy over an existing managed file')),
1839 ] + walkopts + dryrunopts,
1840 _('[OPTION]... [SOURCE]... DEST'))
1841 def copy(ui, repo, *pats, **opts):
1842 """mark files as copied for the next commit
1843
1844 Mark dest as having copies of source files. If dest is a
1845 directory, copies are put in that directory. If dest is a file,
1846 the source must be a single file.
1847
1848 By default, this command copies the contents of files as they
1849 exist in the working directory. If invoked with -A/--after, the
1850 operation is recorded, but no copying is performed.
1851
1852 This command takes effect with the next commit. To undo a copy
1853 before that, see :hg:`revert`.
1854
1855 Returns 0 on success, 1 if errors are encountered.
1856 """
1857 with repo.wlock(False):
1858 return cmdutil.copy(ui, repo, pats, opts)
1859
1860 @command('debuginstall', [] + formatteropts, '', norepo=True)
1861 def debuginstall(ui, **opts):
1862 '''test Mercurial installation
1863
1864 Returns 0 on success.
1865 '''
1866
1867 def writetemp(contents):
1868 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1869 f = os.fdopen(fd, "wb")
1870 f.write(contents)
1871 f.close()
1872 return name
1873
1874 problems = 0
1875
1876 fm = ui.formatter('debuginstall', opts)
1877 fm.startitem()
1878
1879 # encoding
1880 fm.write('encoding', _("checking encoding (%s)...\n"), encoding.encoding)
1881 err = None
1882 try:
1883 encoding.fromlocal("test")
1884 except error.Abort as inst:
1885 err = inst
1886 problems += 1
1887 fm.condwrite(err, 'encodingerror', _(" %s\n"
1888 " (check that your locale is properly set)\n"), err)
1889
1890 # Python
1891 fm.write('pythonexe', _("checking Python executable (%s)\n"),
1892 sys.executable)
1892 pycompat.sysexecutable)
1893 fm.write('pythonver', _("checking Python version (%s)\n"),
1894 ("%d.%d.%d" % sys.version_info[:3]))
1895 fm.write('pythonlib', _("checking Python lib (%s)...\n"),
1896 os.path.dirname(os.__file__))
1897
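The executable check above is the line this commit touches: it now reports pycompat.sysexecutable instead of sys.executable, because Mercurial handles filesystem paths as bytes on Python 3. A rough sketch of what such a shim amounts to (an assumption about pycompat's internals, not a copy of it)::

    import os
    import sys

    if sys.version_info[0] >= 3:
        # assumed behaviour: expose the interpreter path as bytes, like other paths
        sysexecutable = os.fsencode(sys.executable) if sys.executable else b''
    else:
        sysexecutable = sys.executable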
1898 security = set(sslutil.supportedprotocols)
1899 if sslutil.hassni:
1900 security.add('sni')
1901
1902 fm.write('pythonsecurity', _("checking Python security support (%s)\n"),
1903 fm.formatlist(sorted(security), name='protocol',
1904 fmt='%s', sep=','))
1905
1906 # These are warnings, not errors. So don't increment problem count. This
1907 # may change in the future.
1908 if 'tls1.2' not in security:
1909 fm.plain(_(' TLS 1.2 not supported by Python install; '
1910 'network connections lack modern security\n'))
1911 if 'sni' not in security:
1912 fm.plain(_(' SNI not supported by Python install; may have '
1913 'connectivity issues with some servers\n'))
1914
1915 # TODO print CA cert info
1916
1917 # hg version
1918 hgver = util.version()
1919 fm.write('hgver', _("checking Mercurial version (%s)\n"),
1920 hgver.split('+')[0])
1921 fm.write('hgverextra', _("checking Mercurial custom build (%s)\n"),
1922 '+'.join(hgver.split('+')[1:]))
1923
1924 # compiled modules
1925 fm.write('hgmodulepolicy', _("checking module policy (%s)\n"),
1926 policy.policy)
1927 fm.write('hgmodules', _("checking installed modules (%s)...\n"),
1928 os.path.dirname(__file__))
1929
1930 err = None
1931 try:
1932 from . import (
1933 base85,
1934 bdiff,
1935 mpatch,
1936 osutil,
1937 )
1938 dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
1939 except Exception as inst:
1940 err = inst
1941 problems += 1
1942 fm.condwrite(err, 'extensionserror', " %s\n", err)
1943
1944 compengines = util.compengines._engines.values()
1945 fm.write('compengines', _('checking registered compression engines (%s)\n'),
1946 fm.formatlist(sorted(e.name() for e in compengines),
1947 name='compengine', fmt='%s', sep=', '))
1948 fm.write('compenginesavail', _('checking available compression engines '
1949 '(%s)\n'),
1950 fm.formatlist(sorted(e.name() for e in compengines
1951 if e.available()),
1952 name='compengine', fmt='%s', sep=', '))
1953
1954 # templates
1955 p = templater.templatepaths()
1956 fm.write('templatedirs', 'checking templates (%s)...\n', ' '.join(p))
1957 fm.condwrite(not p, '', _(" no template directories found\n"))
1958 if p:
1958 if p:
1959 m = templater.templatepath("map-cmdline.default")
1959 m = templater.templatepath("map-cmdline.default")
1960 if m:
1960 if m:
1961 # template found, check if it is working
1961 # template found, check if it is working
1962 err = None
1962 err = None
1963 try:
1963 try:
1964 templater.templater.frommapfile(m)
1964 templater.templater.frommapfile(m)
1965 except Exception as inst:
1965 except Exception as inst:
1966 err = inst
1966 err = inst
1967 p = None
1967 p = None
1968 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1968 fm.condwrite(err, 'defaulttemplateerror', " %s\n", err)
1969 else:
1969 else:
1970 p = None
1970 p = None
1971 fm.condwrite(p, 'defaulttemplate',
1971 fm.condwrite(p, 'defaulttemplate',
1972 _("checking default template (%s)\n"), m)
1972 _("checking default template (%s)\n"), m)
1973 fm.condwrite(not m, 'defaulttemplatenotfound',
1973 fm.condwrite(not m, 'defaulttemplatenotfound',
1974 _(" template '%s' not found\n"), "default")
1974 _(" template '%s' not found\n"), "default")
1975 if not p:
1975 if not p:
1976 problems += 1
1976 problems += 1
1977 fm.condwrite(not p, '',
1977 fm.condwrite(not p, '',
1978 _(" (templates seem to have been installed incorrectly)\n"))
1978 _(" (templates seem to have been installed incorrectly)\n"))
1979
1979
1980 # editor
1980 # editor
1981 editor = ui.geteditor()
1981 editor = ui.geteditor()
1982 editor = util.expandpath(editor)
1982 editor = util.expandpath(editor)
1983 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1983 fm.write('editor', _("checking commit editor... (%s)\n"), editor)
1984 cmdpath = util.findexe(shlex.split(editor)[0])
1984 cmdpath = util.findexe(shlex.split(editor)[0])
1985 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1985 fm.condwrite(not cmdpath and editor == 'vi', 'vinotfound',
1986 _(" No commit editor set and can't find %s in PATH\n"
1986 _(" No commit editor set and can't find %s in PATH\n"
1987 " (specify a commit editor in your configuration"
1987 " (specify a commit editor in your configuration"
1988 " file)\n"), not cmdpath and editor == 'vi' and editor)
1988 " file)\n"), not cmdpath and editor == 'vi' and editor)
1989 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1989 fm.condwrite(not cmdpath and editor != 'vi', 'editornotfound',
1990 _(" Can't find editor '%s' in PATH\n"
1990 _(" Can't find editor '%s' in PATH\n"
1991 " (specify a commit editor in your configuration"
1991 " (specify a commit editor in your configuration"
1992 " file)\n"), not cmdpath and editor)
1992 " file)\n"), not cmdpath and editor)
1993 if not cmdpath and editor != 'vi':
1993 if not cmdpath and editor != 'vi':
1994 problems += 1
1994 problems += 1
1995
1995
1996 # check username
1996 # check username
1997 username = None
1997 username = None
1998 err = None
1998 err = None
1999 try:
1999 try:
2000 username = ui.username()
2000 username = ui.username()
2001 except error.Abort as e:
2001 except error.Abort as e:
2002 err = e
2002 err = e
2003 problems += 1
2003 problems += 1
2004
2004
2005 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
2005 fm.condwrite(username, 'username', _("checking username (%s)\n"), username)
2006 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
2006 fm.condwrite(err, 'usernameerror', _("checking username...\n %s\n"
2007 " (specify a username in your configuration file)\n"), err)
2007 " (specify a username in your configuration file)\n"), err)
2008
2008
2009 fm.condwrite(not problems, '',
2009 fm.condwrite(not problems, '',
2010 _("no problems detected\n"))
2010 _("no problems detected\n"))
2011 if not problems:
2011 if not problems:
2012 fm.data(problems=problems)
2012 fm.data(problems=problems)
2013 fm.condwrite(problems, 'problems',
2013 fm.condwrite(problems, 'problems',
2014 _("%d problems detected,"
2014 _("%d problems detected,"
2015 " please check your install!\n"), problems)
2015 " please check your install!\n"), problems)
2016 fm.end()
2016 fm.end()
2017
2017
2018 return problems
2018 return problems
2019
2019
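# Illustrative sketch (not part of this module; names are hypothetical): the
# checks above follow one pattern -- probe something, record an exception in
# `err`, let fm.condwrite() print the failure field only when `err` is set,
# and bump a problem counter. A minimal plain-Python version of that flow:
def _demo_collect_problems(checks):
    """Run (name, callable) checks; return (report lines, problem count)."""
    lines, problems = [], 0
    for name, check in checks:
        try:
            check()
            lines.append("checking %s... ok" % name)
        except Exception as err:
            lines.append("checking %s... %s" % (name, err))
            problems += 1
    return lines, problems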
2020 @command('debugknown', [], _('REPO ID...'), norepo=True)
2020 @command('debugknown', [], _('REPO ID...'), norepo=True)
2021 def debugknown(ui, repopath, *ids, **opts):
2021 def debugknown(ui, repopath, *ids, **opts):
2022 """test whether node ids are known to a repo
2022 """test whether node ids are known to a repo
2023
2023
2024 Every ID must be a full-length hex node id string. Returns a list of 0s
2024 Every ID must be a full-length hex node id string. Returns a list of 0s
2025 and 1s indicating unknown/known.
2025 and 1s indicating unknown/known.
2026 """
2026 """
2027 repo = hg.peer(ui, opts, repopath)
2027 repo = hg.peer(ui, opts, repopath)
2028 if not repo.capable('known'):
2028 if not repo.capable('known'):
2029 raise error.Abort("known() not supported by target repository")
2029 raise error.Abort("known() not supported by target repository")
2030 flags = repo.known([bin(s) for s in ids])
2030 flags = repo.known([bin(s) for s in ids])
2031 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2031 ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
2032
2032
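# Illustrative sketch (not part of this module; the helper is hypothetical):
# debugknown above prints one character per queried node -- '1' if the peer
# knows it, '0' otherwise -- using the same and/or idiom as the ui.write call:
def _demo_format_known(flags):
    """Render a list of booleans the way debugknown prints them."""
    return "".join(f and "1" or "0" for f in flags)

# _demo_format_known([True, False, True]) == '101'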
2033 @command('debuglabelcomplete', [], _('LABEL...'))
2033 @command('debuglabelcomplete', [], _('LABEL...'))
2034 def debuglabelcomplete(ui, repo, *args):
2034 def debuglabelcomplete(ui, repo, *args):
2035 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2035 '''backwards compatibility with old bash completion scripts (DEPRECATED)'''
2036 debugnamecomplete(ui, repo, *args)
2036 debugnamecomplete(ui, repo, *args)
2037
2037
2038 @command('debugmergestate', [], '')
2038 @command('debugmergestate', [], '')
2039 def debugmergestate(ui, repo, *args):
2039 def debugmergestate(ui, repo, *args):
2040 """print merge state
2040 """print merge state
2041
2041
2042 Use --verbose to print out information about whether v1 or v2 merge state
2042 Use --verbose to print out information about whether v1 or v2 merge state
2043 was chosen."""
2043 was chosen."""
2044 def _hashornull(h):
2044 def _hashornull(h):
2045 if h == nullhex:
2045 if h == nullhex:
2046 return 'null'
2046 return 'null'
2047 else:
2047 else:
2048 return h
2048 return h
2049
2049
2050 def printrecords(version):
2050 def printrecords(version):
2051 ui.write(('* version %s records\n') % version)
2051 ui.write(('* version %s records\n') % version)
2052 if version == 1:
2052 if version == 1:
2053 records = v1records
2053 records = v1records
2054 else:
2054 else:
2055 records = v2records
2055 records = v2records
2056
2056
2057 for rtype, record in records:
2057 for rtype, record in records:
2058 # pretty print some record types
2058 # pretty print some record types
2059 if rtype == 'L':
2059 if rtype == 'L':
2060 ui.write(('local: %s\n') % record)
2060 ui.write(('local: %s\n') % record)
2061 elif rtype == 'O':
2061 elif rtype == 'O':
2062 ui.write(('other: %s\n') % record)
2062 ui.write(('other: %s\n') % record)
2063 elif rtype == 'm':
2063 elif rtype == 'm':
2064 driver, mdstate = record.split('\0', 1)
2064 driver, mdstate = record.split('\0', 1)
2065 ui.write(('merge driver: %s (state "%s")\n')
2065 ui.write(('merge driver: %s (state "%s")\n')
2066 % (driver, mdstate))
2066 % (driver, mdstate))
2067 elif rtype in 'FDC':
2067 elif rtype in 'FDC':
2068 r = record.split('\0')
2068 r = record.split('\0')
2069 f, state, hash, lfile, afile, anode, ofile = r[0:7]
2069 f, state, hash, lfile, afile, anode, ofile = r[0:7]
2070 if version == 1:
2070 if version == 1:
2071 onode = 'not stored in v1 format'
2071 onode = 'not stored in v1 format'
2072 flags = r[7]
2072 flags = r[7]
2073 else:
2073 else:
2074 onode, flags = r[7:9]
2074 onode, flags = r[7:9]
2075 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
2075 ui.write(('file: %s (record type "%s", state "%s", hash %s)\n')
2076 % (f, rtype, state, _hashornull(hash)))
2076 % (f, rtype, state, _hashornull(hash)))
2077 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
2077 ui.write((' local path: %s (flags "%s")\n') % (lfile, flags))
2078 ui.write((' ancestor path: %s (node %s)\n')
2078 ui.write((' ancestor path: %s (node %s)\n')
2079 % (afile, _hashornull(anode)))
2079 % (afile, _hashornull(anode)))
2080 ui.write((' other path: %s (node %s)\n')
2080 ui.write((' other path: %s (node %s)\n')
2081 % (ofile, _hashornull(onode)))
2081 % (ofile, _hashornull(onode)))
2082 elif rtype == 'f':
2082 elif rtype == 'f':
2083 filename, rawextras = record.split('\0', 1)
2083 filename, rawextras = record.split('\0', 1)
2084 extras = rawextras.split('\0')
2084 extras = rawextras.split('\0')
2085 i = 0
2085 i = 0
2086 extrastrings = []
2086 extrastrings = []
2087 while i < len(extras):
2087 while i < len(extras):
2088 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
2088 extrastrings.append('%s = %s' % (extras[i], extras[i + 1]))
2089 i += 2
2089 i += 2
2090
2090
2091 ui.write(('file extras: %s (%s)\n')
2091 ui.write(('file extras: %s (%s)\n')
2092 % (filename, ', '.join(extrastrings)))
2092 % (filename, ', '.join(extrastrings)))
2093 elif rtype == 'l':
2093 elif rtype == 'l':
2094 labels = record.split('\0', 2)
2094 labels = record.split('\0', 2)
2095 labels = [l for l in labels if len(l) > 0]
2095 labels = [l for l in labels if len(l) > 0]
2096 ui.write(('labels:\n'))
2096 ui.write(('labels:\n'))
2097 ui.write((' local: %s\n' % labels[0]))
2097 ui.write((' local: %s\n' % labels[0]))
2098 ui.write((' other: %s\n' % labels[1]))
2098 ui.write((' other: %s\n' % labels[1]))
2099 if len(labels) > 2:
2099 if len(labels) > 2:
2100 ui.write((' base: %s\n' % labels[2]))
2100 ui.write((' base: %s\n' % labels[2]))
2101 else:
2101 else:
2102 ui.write(('unrecognized entry: %s\t%s\n')
2102 ui.write(('unrecognized entry: %s\t%s\n')
2103 % (rtype, record.replace('\0', '\t')))
2103 % (rtype, record.replace('\0', '\t')))
2104
2104
2105 # Avoid mergestate.read() since it may raise an exception for unsupported
2105 # Avoid mergestate.read() since it may raise an exception for unsupported
2106 # merge state records. We shouldn't be doing this, but this is OK since this
2106 # merge state records. We shouldn't be doing this, but this is OK since this
2107 # command is pretty low-level.
2107 # command is pretty low-level.
2108 ms = mergemod.mergestate(repo)
2108 ms = mergemod.mergestate(repo)
2109
2109
2110 # sort so that reasonable information is on top
2110 # sort so that reasonable information is on top
2111 v1records = ms._readrecordsv1()
2111 v1records = ms._readrecordsv1()
2112 v2records = ms._readrecordsv2()
2112 v2records = ms._readrecordsv2()
2113 order = 'LOml'
2113 order = 'LOml'
2114 def key(r):
2114 def key(r):
2115 idx = order.find(r[0])
2115 idx = order.find(r[0])
2116 if idx == -1:
2116 if idx == -1:
2117 return (1, r[1])
2117 return (1, r[1])
2118 else:
2118 else:
2119 return (0, idx)
2119 return (0, idx)
2120 v1records.sort(key=key)
2120 v1records.sort(key=key)
2121 v2records.sort(key=key)
2121 v2records.sort(key=key)
2122
2122
2123 if not v1records and not v2records:
2123 if not v1records and not v2records:
2124 ui.write(('no merge state found\n'))
2124 ui.write(('no merge state found\n'))
2125 elif not v2records:
2125 elif not v2records:
2126 ui.note(('no version 2 merge state\n'))
2126 ui.note(('no version 2 merge state\n'))
2127 printrecords(1)
2127 printrecords(1)
2128 elif ms._v1v2match(v1records, v2records):
2128 elif ms._v1v2match(v1records, v2records):
2129 ui.note(('v1 and v2 states match: using v2\n'))
2129 ui.note(('v1 and v2 states match: using v2\n'))
2130 printrecords(2)
2130 printrecords(2)
2131 else:
2131 else:
2132 ui.note(('v1 and v2 states mismatch: using v1\n'))
2132 ui.note(('v1 and v2 states mismatch: using v1\n'))
2133 printrecords(1)
2133 printrecords(1)
2134 if ui.verbose:
2134 if ui.verbose:
2135 printrecords(2)
2135 printrecords(2)
2136
2136
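# Illustrative sketch (not part of this module; the record value is made up):
# the 'f' (file extras) records decoded above are '\0'-separated -- a
# filename followed by alternating key/value strings. A standalone parser
# for that layout:
def _demo_parse_file_extras(record):
    filename, rawextras = record.split('\0', 1)
    extras = rawextras.split('\0')
    return filename, dict(zip(extras[::2], extras[1::2]))

# _demo_parse_file_extras('a.txt\0ancestorlinknode\0abc123')
# -> ('a.txt', {'ancestorlinknode': 'abc123'})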
2137 @command('debugnamecomplete', [], _('NAME...'))
2137 @command('debugnamecomplete', [], _('NAME...'))
2138 def debugnamecomplete(ui, repo, *args):
2138 def debugnamecomplete(ui, repo, *args):
2139 '''complete "names" - tags, open branch names, bookmark names'''
2139 '''complete "names" - tags, open branch names, bookmark names'''
2140
2140
2141 names = set()
2141 names = set()
2142 # since we previously only listed open branches, we will handle that
2142 # since we previously only listed open branches, we will handle that
2143 # specially (after this for loop)
2143 # specially (after this for loop)
2144 for name, ns in repo.names.iteritems():
2144 for name, ns in repo.names.iteritems():
2145 if name != 'branches':
2145 if name != 'branches':
2146 names.update(ns.listnames(repo))
2146 names.update(ns.listnames(repo))
2147 names.update(tag for (tag, heads, tip, closed)
2147 names.update(tag for (tag, heads, tip, closed)
2148 in repo.branchmap().iterbranches() if not closed)
2148 in repo.branchmap().iterbranches() if not closed)
2149 completions = set()
2149 completions = set()
2150 if not args:
2150 if not args:
2151 args = ['']
2151 args = ['']
2152 for a in args:
2152 for a in args:
2153 completions.update(n for n in names if n.startswith(a))
2153 completions.update(n for n in names if n.startswith(a))
2154 ui.write('\n'.join(sorted(completions)))
2154 ui.write('\n'.join(sorted(completions)))
2155 ui.write('\n')
2155 ui.write('\n')
2156
2156
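# Illustrative sketch (not part of this module; the helper is hypothetical):
# once the tag/bookmark/branch names are collected, completion above is just
# a prefix filter:
def _demo_complete(names, prefix):
    return sorted(n for n in names if n.startswith(prefix))

# _demo_complete({'default', 'stable', 'tip'}, 'st') == ['stable']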
2157 @command('debuglocks',
2157 @command('debuglocks',
2158 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2158 [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')),
2159 ('W', 'force-wlock', None,
2159 ('W', 'force-wlock', None,
2160 _('free the working state lock (DANGEROUS)'))],
2160 _('free the working state lock (DANGEROUS)'))],
2161 _('[OPTION]...'))
2161 _('[OPTION]...'))
2162 def debuglocks(ui, repo, **opts):
2162 def debuglocks(ui, repo, **opts):
2163 """show or modify state of locks
2163 """show or modify state of locks
2164
2164
2165 By default, this command will show which locks are held. This
2165 By default, this command will show which locks are held. This
2166 includes the user and process holding the lock, the amount of time
2166 includes the user and process holding the lock, the amount of time
2167 the lock has been held, and the machine name where the process is
2167 the lock has been held, and the machine name where the process is
2168 running if it's not local.
2168 running if it's not local.
2169
2169
2170 Locks protect the integrity of Mercurial's data, so should be
2170 Locks protect the integrity of Mercurial's data, so should be
2171 treated with care. System crashes or other interruptions may cause
2171 treated with care. System crashes or other interruptions may cause
2172 locks to not be properly released, though Mercurial will usually
2172 locks to not be properly released, though Mercurial will usually
2173 detect and remove such stale locks automatically.
2173 detect and remove such stale locks automatically.
2174
2174
2175 However, detecting stale locks may not always be possible (for
2175 However, detecting stale locks may not always be possible (for
2176 instance, on a shared filesystem). Removing locks may also be
2176 instance, on a shared filesystem). Removing locks may also be
2177 blocked by filesystem permissions.
2177 blocked by filesystem permissions.
2178
2178
2179 Returns 0 if no locks are held.
2179 Returns 0 if no locks are held.
2180
2180
2181 """
2181 """
2182
2182
2183 if opts.get('force_lock'):
2183 if opts.get('force_lock'):
2184 repo.svfs.unlink('lock')
2184 repo.svfs.unlink('lock')
2185 if opts.get('force_wlock'):
2185 if opts.get('force_wlock'):
2186 repo.vfs.unlink('wlock')
2186 repo.vfs.unlink('wlock')
2187 if opts.get('force_lock') or opts.get('force_wlock'):
2187 if opts.get('force_lock') or opts.get('force_wlock'):
2188 return 0
2188 return 0
2189
2189
2190 now = time.time()
2190 now = time.time()
2191 held = 0
2191 held = 0
2192
2192
2193 def report(vfs, name, method):
2193 def report(vfs, name, method):
2194 # this causes stale locks to get reaped for more accurate reporting
2194 # this causes stale locks to get reaped for more accurate reporting
2195 try:
2195 try:
2196 l = method(False)
2196 l = method(False)
2197 except error.LockHeld:
2197 except error.LockHeld:
2198 l = None
2198 l = None
2199
2199
2200 if l:
2200 if l:
2201 l.release()
2201 l.release()
2202 else:
2202 else:
2203 try:
2203 try:
2204 stat = vfs.lstat(name)
2204 stat = vfs.lstat(name)
2205 age = now - stat.st_mtime
2205 age = now - stat.st_mtime
2206 user = util.username(stat.st_uid)
2206 user = util.username(stat.st_uid)
2207 locker = vfs.readlock(name)
2207 locker = vfs.readlock(name)
2208 if ":" in locker:
2208 if ":" in locker:
2209 host, pid = locker.split(':')
2209 host, pid = locker.split(':')
2210 if host == socket.gethostname():
2210 if host == socket.gethostname():
2211 locker = 'user %s, process %s' % (user, pid)
2211 locker = 'user %s, process %s' % (user, pid)
2212 else:
2212 else:
2213 locker = 'user %s, process %s, host %s' \
2213 locker = 'user %s, process %s, host %s' \
2214 % (user, pid, host)
2214 % (user, pid, host)
2215 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
2215 ui.write(("%-6s %s (%ds)\n") % (name + ":", locker, age))
2216 return 1
2216 return 1
2217 except OSError as e:
2217 except OSError as e:
2218 if e.errno != errno.ENOENT:
2218 if e.errno != errno.ENOENT:
2219 raise
2219 raise
2220
2220
2221 ui.write(("%-6s free\n") % (name + ":"))
2221 ui.write(("%-6s free\n") % (name + ":"))
2222 return 0
2222 return 0
2223
2223
2224 held += report(repo.svfs, "lock", repo.lock)
2224 held += report(repo.svfs, "lock", repo.lock)
2225 held += report(repo.vfs, "wlock", repo.wlock)
2225 held += report(repo.vfs, "wlock", repo.wlock)
2226
2226
2227 return held
2227 return held
2228
2228
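# Illustrative sketch (not part of this module; the helper is hypothetical):
# the lock files inspected above store a locker string of the form
# "host:pid"; report() renders it as an owner, dropping the host part when
# the lock was taken on the local machine. Roughly:
import socket

def _demo_describe_locker(locker, user):
    if ":" not in locker:
        return locker
    host, pid = locker.split(':', 1)
    if host == socket.gethostname():
        return 'user %s, process %s' % (user, pid)
    return 'user %s, process %s, host %s' % (user, pid, host)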
2229 @command('debugobsolete',
2229 @command('debugobsolete',
2230 [('', 'flags', 0, _('markers flag')),
2230 [('', 'flags', 0, _('markers flag')),
2231 ('', 'record-parents', False,
2231 ('', 'record-parents', False,
2232 _('record parent information for the precursor')),
2232 _('record parent information for the precursor')),
2233 ('r', 'rev', [], _('display markers relevant to REV')),
2233 ('r', 'rev', [], _('display markers relevant to REV')),
2234 ('', 'index', False, _('display index of the marker')),
2234 ('', 'index', False, _('display index of the marker')),
2235 ('', 'delete', [], _('delete markers specified by indices')),
2235 ('', 'delete', [], _('delete markers specified by indices')),
2236 ] + commitopts2 + formatteropts,
2236 ] + commitopts2 + formatteropts,
2237 _('[OBSOLETED [REPLACEMENT ...]]'))
2237 _('[OBSOLETED [REPLACEMENT ...]]'))
2238 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2238 def debugobsolete(ui, repo, precursor=None, *successors, **opts):
2239 """create arbitrary obsolete marker
2239 """create arbitrary obsolete marker
2240
2240
2241 With no arguments, displays the list of obsolescence markers."""
2241 With no arguments, displays the list of obsolescence markers."""
2242
2242
2243 def parsenodeid(s):
2243 def parsenodeid(s):
2244 try:
2244 try:
2245 # We do not use revsingle/revrange functions here to accept
2245 # We do not use revsingle/revrange functions here to accept
2246 # arbitrary node identifiers, possibly not present in the
2246 # arbitrary node identifiers, possibly not present in the
2247 # local repository.
2247 # local repository.
2248 n = bin(s)
2248 n = bin(s)
2249 if len(n) != len(nullid):
2249 if len(n) != len(nullid):
2250 raise TypeError()
2250 raise TypeError()
2251 return n
2251 return n
2252 except TypeError:
2252 except TypeError:
2253 raise error.Abort('changeset references must be full hexadecimal '
2253 raise error.Abort('changeset references must be full hexadecimal '
2254 'node identifiers')
2254 'node identifiers')
2255
2255
2256 if opts.get('delete'):
2256 if opts.get('delete'):
2257 indices = []
2257 indices = []
2258 for v in opts.get('delete'):
2258 for v in opts.get('delete'):
2259 try:
2259 try:
2260 indices.append(int(v))
2260 indices.append(int(v))
2261 except ValueError:
2261 except ValueError:
2262 raise error.Abort(_('invalid index value: %r') % v,
2262 raise error.Abort(_('invalid index value: %r') % v,
2263 hint=_('use integers for indices'))
2263 hint=_('use integers for indices'))
2264
2264
2265 if repo.currenttransaction():
2265 if repo.currenttransaction():
2266 raise error.Abort(_('cannot delete obsmarkers in the middle '
2266 raise error.Abort(_('cannot delete obsmarkers in the middle '
2267 'of a transaction.'))
2267 'of a transaction.'))
2268
2268
2269 with repo.lock():
2269 with repo.lock():
2270 n = repair.deleteobsmarkers(repo.obsstore, indices)
2270 n = repair.deleteobsmarkers(repo.obsstore, indices)
2271 ui.write(_('deleted %i obsolescence markers\n') % n)
2271 ui.write(_('deleted %i obsolescence markers\n') % n)
2272
2272
2273 return
2273 return
2274
2274
2275 if precursor is not None:
2275 if precursor is not None:
2276 if opts['rev']:
2276 if opts['rev']:
2277 raise error.Abort('cannot select revision when creating marker')
2277 raise error.Abort('cannot select revision when creating marker')
2278 metadata = {}
2278 metadata = {}
2279 metadata['user'] = opts['user'] or ui.username()
2279 metadata['user'] = opts['user'] or ui.username()
2280 succs = tuple(parsenodeid(succ) for succ in successors)
2280 succs = tuple(parsenodeid(succ) for succ in successors)
2281 l = repo.lock()
2281 l = repo.lock()
2282 try:
2282 try:
2283 tr = repo.transaction('debugobsolete')
2283 tr = repo.transaction('debugobsolete')
2284 try:
2284 try:
2285 date = opts.get('date')
2285 date = opts.get('date')
2286 if date:
2286 if date:
2287 date = util.parsedate(date)
2287 date = util.parsedate(date)
2288 else:
2288 else:
2289 date = None
2289 date = None
2290 prec = parsenodeid(precursor)
2290 prec = parsenodeid(precursor)
2291 parents = None
2291 parents = None
2292 if opts['record_parents']:
2292 if opts['record_parents']:
2293 if prec not in repo.unfiltered():
2293 if prec not in repo.unfiltered():
2294 raise error.Abort('cannot use --record-parents on '
2294 raise error.Abort('cannot use --record-parents on '
2295 'unknown changesets')
2295 'unknown changesets')
2296 parents = repo.unfiltered()[prec].parents()
2296 parents = repo.unfiltered()[prec].parents()
2297 parents = tuple(p.node() for p in parents)
2297 parents = tuple(p.node() for p in parents)
2298 repo.obsstore.create(tr, prec, succs, opts['flags'],
2298 repo.obsstore.create(tr, prec, succs, opts['flags'],
2299 parents=parents, date=date,
2299 parents=parents, date=date,
2300 metadata=metadata)
2300 metadata=metadata)
2301 tr.close()
2301 tr.close()
2302 except ValueError as exc:
2302 except ValueError as exc:
2303 raise error.Abort(_('bad obsmarker input: %s') % exc)
2303 raise error.Abort(_('bad obsmarker input: %s') % exc)
2304 finally:
2304 finally:
2305 tr.release()
2305 tr.release()
2306 finally:
2306 finally:
2307 l.release()
2307 l.release()
2308 else:
2308 else:
2309 if opts['rev']:
2309 if opts['rev']:
2310 revs = scmutil.revrange(repo, opts['rev'])
2310 revs = scmutil.revrange(repo, opts['rev'])
2311 nodes = [repo[r].node() for r in revs]
2311 nodes = [repo[r].node() for r in revs]
2312 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2312 markers = list(obsolete.getmarkers(repo, nodes=nodes))
2313 markers.sort(key=lambda x: x._data)
2313 markers.sort(key=lambda x: x._data)
2314 else:
2314 else:
2315 markers = obsolete.getmarkers(repo)
2315 markers = obsolete.getmarkers(repo)
2316
2316
2317 markerstoiter = markers
2317 markerstoiter = markers
2318 isrelevant = lambda m: True
2318 isrelevant = lambda m: True
2319 if opts.get('rev') and opts.get('index'):
2319 if opts.get('rev') and opts.get('index'):
2320 markerstoiter = obsolete.getmarkers(repo)
2320 markerstoiter = obsolete.getmarkers(repo)
2321 markerset = set(markers)
2321 markerset = set(markers)
2322 isrelevant = lambda m: m in markerset
2322 isrelevant = lambda m: m in markerset
2323
2323
2324 fm = ui.formatter('debugobsolete', opts)
2324 fm = ui.formatter('debugobsolete', opts)
2325 for i, m in enumerate(markerstoiter):
2325 for i, m in enumerate(markerstoiter):
2326 if not isrelevant(m):
2326 if not isrelevant(m):
2327 # marker can be irrelevant when we're iterating over a set
2327 # marker can be irrelevant when we're iterating over a set
2328 # of markers (markerstoiter) which is bigger than the set
2328 # of markers (markerstoiter) which is bigger than the set
2329 # of markers we want to display (markers)
2329 # of markers we want to display (markers)
2330 # this can happen if both --index and --rev options are
2330 # this can happen if both --index and --rev options are
2331 # provided and thus we need to iterate over all of the markers
2331 # provided and thus we need to iterate over all of the markers
2332 # to get the correct indices, but only display the ones that
2332 # to get the correct indices, but only display the ones that
2333 # are relevant to --rev value
2333 # are relevant to --rev value
2334 continue
2334 continue
2335 fm.startitem()
2335 fm.startitem()
2336 ind = i if opts.get('index') else None
2336 ind = i if opts.get('index') else None
2337 cmdutil.showmarker(fm, m, index=ind)
2337 cmdutil.showmarker(fm, m, index=ind)
2338 fm.end()
2338 fm.end()
2339
2339
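# Illustrative sketch (not part of this module; names are hypothetical):
# parsenodeid() above only accepts full-length identifiers -- 40 hex digits,
# i.e. the 20 raw bytes of a nullid-sized node. A standalone equivalent of
# that check:
from binascii import unhexlify

def _demo_parsenodeid(s, nodelen=20):
    try:
        n = unhexlify(s)
    except (TypeError, ValueError):
        n = None
    if n is None or len(n) != nodelen:
        raise ValueError('changeset references must be full hexadecimal '
                         'node identifiers')
    return n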
2340 @command('debugpathcomplete',
2340 @command('debugpathcomplete',
2341 [('f', 'full', None, _('complete an entire path')),
2341 [('f', 'full', None, _('complete an entire path')),
2342 ('n', 'normal', None, _('show only normal files')),
2342 ('n', 'normal', None, _('show only normal files')),
2343 ('a', 'added', None, _('show only added files')),
2343 ('a', 'added', None, _('show only added files')),
2344 ('r', 'removed', None, _('show only removed files'))],
2344 ('r', 'removed', None, _('show only removed files'))],
2345 _('FILESPEC...'))
2345 _('FILESPEC...'))
2346 def debugpathcomplete(ui, repo, *specs, **opts):
2346 def debugpathcomplete(ui, repo, *specs, **opts):
2347 '''complete part or all of a tracked path
2347 '''complete part or all of a tracked path
2348
2348
2349 This command supports shells that offer path name completion. It
2349 This command supports shells that offer path name completion. It
2350 currently completes only files already known to the dirstate.
2350 currently completes only files already known to the dirstate.
2351
2351
2352 Completion extends only to the next path segment unless
2352 Completion extends only to the next path segment unless
2353 --full is specified, in which case entire paths are used.'''
2353 --full is specified, in which case entire paths are used.'''
2354
2354
2355 def complete(path, acceptable):
2355 def complete(path, acceptable):
2356 dirstate = repo.dirstate
2356 dirstate = repo.dirstate
2357 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
2357 spec = os.path.normpath(os.path.join(pycompat.getcwd(), path))
2358 rootdir = repo.root + pycompat.ossep
2358 rootdir = repo.root + pycompat.ossep
2359 if spec != repo.root and not spec.startswith(rootdir):
2359 if spec != repo.root and not spec.startswith(rootdir):
2360 return [], []
2360 return [], []
2361 if os.path.isdir(spec):
2361 if os.path.isdir(spec):
2362 spec += '/'
2362 spec += '/'
2363 spec = spec[len(rootdir):]
2363 spec = spec[len(rootdir):]
2364 fixpaths = pycompat.ossep != '/'
2364 fixpaths = pycompat.ossep != '/'
2365 if fixpaths:
2365 if fixpaths:
2366 spec = spec.replace(pycompat.ossep, '/')
2366 spec = spec.replace(pycompat.ossep, '/')
2367 speclen = len(spec)
2367 speclen = len(spec)
2368 fullpaths = opts['full']
2368 fullpaths = opts['full']
2369 files, dirs = set(), set()
2369 files, dirs = set(), set()
2370 adddir, addfile = dirs.add, files.add
2370 adddir, addfile = dirs.add, files.add
2371 for f, st in dirstate.iteritems():
2371 for f, st in dirstate.iteritems():
2372 if f.startswith(spec) and st[0] in acceptable:
2372 if f.startswith(spec) and st[0] in acceptable:
2373 if fixpaths:
2373 if fixpaths:
2374 f = f.replace('/', pycompat.ossep)
2374 f = f.replace('/', pycompat.ossep)
2375 if fullpaths:
2375 if fullpaths:
2376 addfile(f)
2376 addfile(f)
2377 continue
2377 continue
2378 s = f.find(pycompat.ossep, speclen)
2378 s = f.find(pycompat.ossep, speclen)
2379 if s >= 0:
2379 if s >= 0:
2380 adddir(f[:s])
2380 adddir(f[:s])
2381 else:
2381 else:
2382 addfile(f)
2382 addfile(f)
2383 return files, dirs
2383 return files, dirs
2384
2384
2385 acceptable = ''
2385 acceptable = ''
2386 if opts['normal']:
2386 if opts['normal']:
2387 acceptable += 'nm'
2387 acceptable += 'nm'
2388 if opts['added']:
2388 if opts['added']:
2389 acceptable += 'a'
2389 acceptable += 'a'
2390 if opts['removed']:
2390 if opts['removed']:
2391 acceptable += 'r'
2391 acceptable += 'r'
2392 cwd = repo.getcwd()
2392 cwd = repo.getcwd()
2393 if not specs:
2393 if not specs:
2394 specs = ['.']
2394 specs = ['.']
2395
2395
2396 files, dirs = set(), set()
2396 files, dirs = set(), set()
2397 for spec in specs:
2397 for spec in specs:
2398 f, d = complete(spec, acceptable or 'nmar')
2398 f, d = complete(spec, acceptable or 'nmar')
2399 files.update(f)
2399 files.update(f)
2400 dirs.update(d)
2400 dirs.update(d)
2401 files.update(dirs)
2401 files.update(dirs)
2402 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2402 ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
2403 ui.write('\n')
2403 ui.write('\n')
2404
2404
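# Illustrative sketch (not part of this module; the helper is hypothetical):
# without --full, complete() above stops each match at the next path
# separator past the typed prefix, so only one additional segment is offered
# at a time:
def _demo_next_segment(path, prefix, sep='/'):
    s = path.find(sep, len(prefix))
    return path[:s] if s >= 0 else path

# _demo_next_segment('src/mercurial/commands.py', 'src/') -> 'src/mercurial'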
2405 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2405 @command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
2406 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2406 def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
2407 '''access the pushkey key/value protocol
2407 '''access the pushkey key/value protocol
2408
2408
2409 With two args, list the keys in the given namespace.
2409 With two args, list the keys in the given namespace.
2410
2410
2411 With five args, set a key to new if it currently is set to old.
2411 With five args, set a key to new if it currently is set to old.
2412 Reports success or failure.
2412 Reports success or failure.
2413 '''
2413 '''
2414
2414
2415 target = hg.peer(ui, {}, repopath)
2415 target = hg.peer(ui, {}, repopath)
2416 if keyinfo:
2416 if keyinfo:
2417 key, old, new = keyinfo
2417 key, old, new = keyinfo
2418 r = target.pushkey(namespace, key, old, new)
2418 r = target.pushkey(namespace, key, old, new)
2419 ui.status(str(r) + '\n')
2419 ui.status(str(r) + '\n')
2420 return not r
2420 return not r
2421 else:
2421 else:
2422 for k, v in sorted(target.listkeys(namespace).iteritems()):
2422 for k, v in sorted(target.listkeys(namespace).iteritems()):
2423 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2423 ui.write("%s\t%s\n" % (k.encode('string-escape'),
2424 v.encode('string-escape')))
2424 v.encode('string-escape')))
2425
2425
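# Illustrative sketch (not part of this module; the helper is hypothetical):
# pushkey is a compare-and-swap style protocol -- the key only moves to
# `new` if its current value matches `old` (with '' roughly standing for
# "absent"). Against a plain dict the semantics look like:
def _demo_pushkey(store, key, old, new):
    if store.get(key, '') != old:
        return False
    store[key] = new
    return True

# _demo_pushkey({}, 'bookmarks/foo', '', 'abc123') is True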
2426 @command('debugpvec', [], _('A B'))
2426 @command('debugpvec', [], _('A B'))
2427 def debugpvec(ui, repo, a, b=None):
2427 def debugpvec(ui, repo, a, b=None):
2428 ca = scmutil.revsingle(repo, a)
2428 ca = scmutil.revsingle(repo, a)
2429 cb = scmutil.revsingle(repo, b)
2429 cb = scmutil.revsingle(repo, b)
2430 pa = pvec.ctxpvec(ca)
2430 pa = pvec.ctxpvec(ca)
2431 pb = pvec.ctxpvec(cb)
2431 pb = pvec.ctxpvec(cb)
2432 if pa == pb:
2432 if pa == pb:
2433 rel = "="
2433 rel = "="
2434 elif pa > pb:
2434 elif pa > pb:
2435 rel = ">"
2435 rel = ">"
2436 elif pa < pb:
2436 elif pa < pb:
2437 rel = "<"
2437 rel = "<"
2438 elif pa | pb:
2438 elif pa | pb:
2439 rel = "|"
2439 rel = "|"
2440 ui.write(_("a: %s\n") % pa)
2440 ui.write(_("a: %s\n") % pa)
2441 ui.write(_("b: %s\n") % pb)
2441 ui.write(_("b: %s\n") % pb)
2442 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2442 ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
2443 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2443 ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
2444 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2444 (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
2445 pa.distance(pb), rel))
2445 pa.distance(pb), rel))
2446
2446
2447 @command('debugrebuilddirstate|debugrebuildstate',
2447 @command('debugrebuilddirstate|debugrebuildstate',
2448 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2448 [('r', 'rev', '', _('revision to rebuild to'), _('REV')),
2449 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2449 ('', 'minimal', None, _('only rebuild files that are inconsistent with '
2450 'the working copy parent')),
2450 'the working copy parent')),
2451 ],
2451 ],
2452 _('[-r REV]'))
2452 _('[-r REV]'))
2453 def debugrebuilddirstate(ui, repo, rev, **opts):
2453 def debugrebuilddirstate(ui, repo, rev, **opts):
2454 """rebuild the dirstate as it would look like for the given revision
2454 """rebuild the dirstate as it would look like for the given revision
2455
2455
2456 If no revision is specified the first current parent will be used.
2456 If no revision is specified the first current parent will be used.
2457
2457
2458 The dirstate will be set to the files of the given revision.
2458 The dirstate will be set to the files of the given revision.
2459 The actual working directory content or existing dirstate
2459 The actual working directory content or existing dirstate
2460 information such as adds or removes is not considered.
2460 information such as adds or removes is not considered.
2461
2461
2462 ``minimal`` will only rebuild the dirstate status for files that claim to be
2462 ``minimal`` will only rebuild the dirstate status for files that claim to be
2463 tracked but are not in the parent manifest, or that exist in the parent
2463 tracked but are not in the parent manifest, or that exist in the parent
2464 manifest but are not in the dirstate. It will not change adds, removes, or
2464 manifest but are not in the dirstate. It will not change adds, removes, or
2465 modified files that are in the working copy parent.
2465 modified files that are in the working copy parent.
2466
2466
2467 One use of this command is to make the next :hg:`status` invocation
2467 One use of this command is to make the next :hg:`status` invocation
2468 check the actual file content.
2468 check the actual file content.
2469 """
2469 """
2470 ctx = scmutil.revsingle(repo, rev)
2470 ctx = scmutil.revsingle(repo, rev)
2471 with repo.wlock():
2471 with repo.wlock():
2472 dirstate = repo.dirstate
2472 dirstate = repo.dirstate
2473 changedfiles = None
2473 changedfiles = None
2474 # See command doc for what minimal does.
2474 # See command doc for what minimal does.
2475 if opts.get('minimal'):
2475 if opts.get('minimal'):
2476 manifestfiles = set(ctx.manifest().keys())
2476 manifestfiles = set(ctx.manifest().keys())
2477 dirstatefiles = set(dirstate)
2477 dirstatefiles = set(dirstate)
2478 manifestonly = manifestfiles - dirstatefiles
2478 manifestonly = manifestfiles - dirstatefiles
2479 dsonly = dirstatefiles - manifestfiles
2479 dsonly = dirstatefiles - manifestfiles
2480 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2480 dsnotadded = set(f for f in dsonly if dirstate[f] != 'a')
2481 changedfiles = manifestonly | dsnotadded
2481 changedfiles = manifestonly | dsnotadded
2482
2482
2483 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2483 dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles)
2484
2484
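# Illustrative sketch (not part of this module; names and inputs are made
# up): the --minimal case above is set arithmetic -- rebuild only files that
# appear in exactly one of the manifest and the dirstate, skipping dirstate
# entries that are merely added ('a'). With toy inputs:
def _demo_minimal_changes(manifestfiles, dirstatefiles, added):
    manifestonly = set(manifestfiles) - set(dirstatefiles)
    dsnotadded = set(dirstatefiles) - set(manifestfiles) - set(added)
    return manifestonly | dsnotadded

# _demo_minimal_changes({'a', 'b'}, {'b', 'c', 'd'}, added={'d'}) == {'a', 'c'}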
2485 @command('debugrebuildfncache', [], '')
2485 @command('debugrebuildfncache', [], '')
2486 def debugrebuildfncache(ui, repo):
2486 def debugrebuildfncache(ui, repo):
2487 """rebuild the fncache file"""
2487 """rebuild the fncache file"""
2488 repair.rebuildfncache(ui, repo)
2488 repair.rebuildfncache(ui, repo)
2489
2489
2490 @command('debugrename',
2490 @command('debugrename',
2491 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2491 [('r', 'rev', '', _('revision to debug'), _('REV'))],
2492 _('[-r REV] FILE'))
2492 _('[-r REV] FILE'))
2493 def debugrename(ui, repo, file1, *pats, **opts):
2493 def debugrename(ui, repo, file1, *pats, **opts):
2494 """dump rename information"""
2494 """dump rename information"""
2495
2495
2496 ctx = scmutil.revsingle(repo, opts.get('rev'))
2496 ctx = scmutil.revsingle(repo, opts.get('rev'))
2497 m = scmutil.match(ctx, (file1,) + pats, opts)
2497 m = scmutil.match(ctx, (file1,) + pats, opts)
2498 for abs in ctx.walk(m):
2498 for abs in ctx.walk(m):
2499 fctx = ctx[abs]
2499 fctx = ctx[abs]
2500 o = fctx.filelog().renamed(fctx.filenode())
2500 o = fctx.filelog().renamed(fctx.filenode())
2501 rel = m.rel(abs)
2501 rel = m.rel(abs)
2502 if o:
2502 if o:
2503 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2503 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
2504 else:
2504 else:
2505 ui.write(_("%s not renamed\n") % rel)
2505 ui.write(_("%s not renamed\n") % rel)
2506
2506
2507 @command('debugrevlog', debugrevlogopts +
2507 @command('debugrevlog', debugrevlogopts +
2508 [('d', 'dump', False, _('dump index data'))],
2508 [('d', 'dump', False, _('dump index data'))],
2509 _('-c|-m|FILE'),
2509 _('-c|-m|FILE'),
2510 optionalrepo=True)
2510 optionalrepo=True)
2511 def debugrevlog(ui, repo, file_=None, **opts):
2511 def debugrevlog(ui, repo, file_=None, **opts):
2512 """show data and statistics about a revlog"""
2512 """show data and statistics about a revlog"""
2513 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2513 r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts)
2514
2514
2515 if opts.get("dump"):
2515 if opts.get("dump"):
2516 numrevs = len(r)
2516 numrevs = len(r)
2517 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2517 ui.write(("# rev p1rev p2rev start end deltastart base p1 p2"
2518 " rawsize totalsize compression heads chainlen\n"))
2518 " rawsize totalsize compression heads chainlen\n"))
2519 ts = 0
2519 ts = 0
2520 heads = set()
2520 heads = set()
2521
2521
2522 for rev in xrange(numrevs):
2522 for rev in xrange(numrevs):
2523 dbase = r.deltaparent(rev)
2523 dbase = r.deltaparent(rev)
2524 if dbase == -1:
2524 if dbase == -1:
2525 dbase = rev
2525 dbase = rev
2526 cbase = r.chainbase(rev)
2526 cbase = r.chainbase(rev)
2527 clen = r.chainlen(rev)
2527 clen = r.chainlen(rev)
2528 p1, p2 = r.parentrevs(rev)
2528 p1, p2 = r.parentrevs(rev)
2529 rs = r.rawsize(rev)
2529 rs = r.rawsize(rev)
2530 ts = ts + rs
2530 ts = ts + rs
2531 heads -= set(r.parentrevs(rev))
2531 heads -= set(r.parentrevs(rev))
2532 heads.add(rev)
2532 heads.add(rev)
2533 try:
2533 try:
2534 compression = ts / r.end(rev)
2534 compression = ts / r.end(rev)
2535 except ZeroDivisionError:
2535 except ZeroDivisionError:
2536 compression = 0
2536 compression = 0
2537 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2537 ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d "
2538 "%11d %5d %8d\n" %
2538 "%11d %5d %8d\n" %
2539 (rev, p1, p2, r.start(rev), r.end(rev),
2539 (rev, p1, p2, r.start(rev), r.end(rev),
2540 r.start(dbase), r.start(cbase),
2540 r.start(dbase), r.start(cbase),
2541 r.start(p1), r.start(p2),
2541 r.start(p1), r.start(p2),
2542 rs, ts, compression, len(heads), clen))
2542 rs, ts, compression, len(heads), clen))
2543 return 0
2543 return 0
2544
2544
2545 v = r.version
2545 v = r.version
2546 format = v & 0xFFFF
2546 format = v & 0xFFFF
2547 flags = []
2547 flags = []
2548 gdelta = False
2548 gdelta = False
2549 if v & revlog.REVLOGNGINLINEDATA:
2549 if v & revlog.REVLOGNGINLINEDATA:
2550 flags.append('inline')
2550 flags.append('inline')
2551 if v & revlog.REVLOGGENERALDELTA:
2551 if v & revlog.REVLOGGENERALDELTA:
2552 gdelta = True
2552 gdelta = True
2553 flags.append('generaldelta')
2553 flags.append('generaldelta')
2554 if not flags:
2554 if not flags:
2555 flags = ['(none)']
2555 flags = ['(none)']
2556
2556
2557 nummerges = 0
2557 nummerges = 0
2558 numfull = 0
2558 numfull = 0
2559 numprev = 0
2559 numprev = 0
2560 nump1 = 0
2560 nump1 = 0
2561 nump2 = 0
2561 nump2 = 0
2562 numother = 0
2562 numother = 0
2563 nump1prev = 0
2563 nump1prev = 0
2564 nump2prev = 0
2564 nump2prev = 0
2565 chainlengths = []
2565 chainlengths = []
2566
2566
2567 datasize = [None, 0, 0]
2567 datasize = [None, 0, 0]
2568 fullsize = [None, 0, 0]
2568 fullsize = [None, 0, 0]
2569 deltasize = [None, 0, 0]
2569 deltasize = [None, 0, 0]
2570 chunktypecounts = {}
2570 chunktypecounts = {}
2571 chunktypesizes = {}
2571 chunktypesizes = {}
2572
2572
2573 def addsize(size, l):
2573 def addsize(size, l):
2574 if l[0] is None or size < l[0]:
2574 if l[0] is None or size < l[0]:
2575 l[0] = size
2575 l[0] = size
2576 if size > l[1]:
2576 if size > l[1]:
2577 l[1] = size
2577 l[1] = size
2578 l[2] += size
2578 l[2] += size
2579
2579
2580 numrevs = len(r)
2580 numrevs = len(r)
2581 for rev in xrange(numrevs):
2581 for rev in xrange(numrevs):
2582 p1, p2 = r.parentrevs(rev)
2582 p1, p2 = r.parentrevs(rev)
2583 delta = r.deltaparent(rev)
2583 delta = r.deltaparent(rev)
2584 if format > 0:
2584 if format > 0:
2585 addsize(r.rawsize(rev), datasize)
2585 addsize(r.rawsize(rev), datasize)
2586 if p2 != nullrev:
2586 if p2 != nullrev:
2587 nummerges += 1
2587 nummerges += 1
2588 size = r.length(rev)
2588 size = r.length(rev)
2589 if delta == nullrev:
2589 if delta == nullrev:
2590 chainlengths.append(0)
2590 chainlengths.append(0)
2591 numfull += 1
2591 numfull += 1
2592 addsize(size, fullsize)
2592 addsize(size, fullsize)
2593 else:
2593 else:
2594 chainlengths.append(chainlengths[delta] + 1)
2594 chainlengths.append(chainlengths[delta] + 1)
2595 addsize(size, deltasize)
2595 addsize(size, deltasize)
2596 if delta == rev - 1:
2596 if delta == rev - 1:
2597 numprev += 1
2597 numprev += 1
2598 if delta == p1:
2598 if delta == p1:
2599 nump1prev += 1
2599 nump1prev += 1
2600 elif delta == p2:
2600 elif delta == p2:
2601 nump2prev += 1
2601 nump2prev += 1
2602 elif delta == p1:
2602 elif delta == p1:
2603 nump1 += 1
2603 nump1 += 1
2604 elif delta == p2:
2604 elif delta == p2:
2605 nump2 += 1
2605 nump2 += 1
2606 elif delta != nullrev:
2606 elif delta != nullrev:
2607 numother += 1
2607 numother += 1
2608
2608
2609 # Obtain data on the raw chunks in the revlog.
2609 # Obtain data on the raw chunks in the revlog.
2610 chunk = r._chunkraw(rev, rev)[1]
2610 chunk = r._chunkraw(rev, rev)[1]
2611 if chunk:
2611 if chunk:
2612 chunktype = chunk[0]
2612 chunktype = chunk[0]
2613 else:
2613 else:
2614 chunktype = 'empty'
2614 chunktype = 'empty'
2615
2615
2616 if chunktype not in chunktypecounts:
2616 if chunktype not in chunktypecounts:
2617 chunktypecounts[chunktype] = 0
2617 chunktypecounts[chunktype] = 0
2618 chunktypesizes[chunktype] = 0
2618 chunktypesizes[chunktype] = 0
2619
2619
2620 chunktypecounts[chunktype] += 1
2620 chunktypecounts[chunktype] += 1
2621 chunktypesizes[chunktype] += size
2621 chunktypesizes[chunktype] += size
2622
2622
2623 # Adjust size min value for empty cases
2623 # Adjust size min value for empty cases
2624 for size in (datasize, fullsize, deltasize):
2624 for size in (datasize, fullsize, deltasize):
2625 if size[0] is None:
2625 if size[0] is None:
2626 size[0] = 0
2626 size[0] = 0
2627
2627
2628 numdeltas = numrevs - numfull
2628 numdeltas = numrevs - numfull
2629 numoprev = numprev - nump1prev - nump2prev
2629 numoprev = numprev - nump1prev - nump2prev
2630 totalrawsize = datasize[2]
2630 totalrawsize = datasize[2]
2631 datasize[2] /= numrevs
2631 datasize[2] /= numrevs
2632 fulltotal = fullsize[2]
2632 fulltotal = fullsize[2]
2633 fullsize[2] /= numfull
2633 fullsize[2] /= numfull
2634 deltatotal = deltasize[2]
2634 deltatotal = deltasize[2]
2635 if numrevs - numfull > 0:
2635 if numrevs - numfull > 0:
2636 deltasize[2] /= numrevs - numfull
2636 deltasize[2] /= numrevs - numfull
2637 totalsize = fulltotal + deltatotal
2637 totalsize = fulltotal + deltatotal
2638 avgchainlen = sum(chainlengths) / numrevs
2638 avgchainlen = sum(chainlengths) / numrevs
2639 maxchainlen = max(chainlengths)
2639 maxchainlen = max(chainlengths)
2640 compratio = 1
2640 compratio = 1
2641 if totalsize:
2641 if totalsize:
2642 compratio = totalrawsize / totalsize
2642 compratio = totalrawsize / totalsize
2643
2643
2644 basedfmtstr = '%%%dd\n'
2644 basedfmtstr = '%%%dd\n'
2645 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2645 basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n'
2646
2646
2647 def dfmtstr(max):
2647 def dfmtstr(max):
2648 return basedfmtstr % len(str(max))
2648 return basedfmtstr % len(str(max))
2649 def pcfmtstr(max, padding=0):
2649 def pcfmtstr(max, padding=0):
2650 return basepcfmtstr % (len(str(max)), ' ' * padding)
2650 return basepcfmtstr % (len(str(max)), ' ' * padding)
2651
2651
2652 def pcfmt(value, total):
2652 def pcfmt(value, total):
2653 if total:
2653 if total:
2654 return (value, 100 * float(value) / total)
2654 return (value, 100 * float(value) / total)
2655 else:
2655 else:
2656 return value, 100.0
2656 return value, 100.0
2657
2657
2658 ui.write(('format : %d\n') % format)
2658 ui.write(('format : %d\n') % format)
2659 ui.write(('flags : %s\n') % ', '.join(flags))
2659 ui.write(('flags : %s\n') % ', '.join(flags))
2660
2660
2661 ui.write('\n')
2661 ui.write('\n')
2662 fmt = pcfmtstr(totalsize)
2662 fmt = pcfmtstr(totalsize)
2663 fmt2 = dfmtstr(totalsize)
2663 fmt2 = dfmtstr(totalsize)
2664 ui.write(('revisions : ') + fmt2 % numrevs)
2664 ui.write(('revisions : ') + fmt2 % numrevs)
2665 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2665 ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs))
2666 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2666 ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs))
2667 ui.write(('revisions : ') + fmt2 % numrevs)
2667 ui.write(('revisions : ') + fmt2 % numrevs)
2668 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2668 ui.write((' full : ') + fmt % pcfmt(numfull, numrevs))
2669 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2669 ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs))
2670 ui.write(('revision size : ') + fmt2 % totalsize)
2670 ui.write(('revision size : ') + fmt2 % totalsize)
2671 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2671 ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize))
2672 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2672 ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize))
2673
2673
2674 def fmtchunktype(chunktype):
2674 def fmtchunktype(chunktype):
2675 if chunktype == 'empty':
2675 if chunktype == 'empty':
2676 return ' %s : ' % chunktype
2676 return ' %s : ' % chunktype
2677 elif chunktype in string.ascii_letters:
2677 elif chunktype in string.ascii_letters:
2678 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2678 return ' 0x%s (%s) : ' % (hex(chunktype), chunktype)
2679 else:
2679 else:
2680 return ' 0x%s : ' % hex(chunktype)
2680 return ' 0x%s : ' % hex(chunktype)
2681
2681
2682 ui.write('\n')
2682 ui.write('\n')
2683 ui.write(('chunks : ') + fmt2 % numrevs)
2683 ui.write(('chunks : ') + fmt2 % numrevs)
2684 for chunktype in sorted(chunktypecounts):
2684 for chunktype in sorted(chunktypecounts):
2685 ui.write(fmtchunktype(chunktype))
2685 ui.write(fmtchunktype(chunktype))
2686 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2686 ui.write(fmt % pcfmt(chunktypecounts[chunktype], numrevs))
2687 ui.write(('chunks size : ') + fmt2 % totalsize)
2687 ui.write(('chunks size : ') + fmt2 % totalsize)
2688 for chunktype in sorted(chunktypecounts):
2688 for chunktype in sorted(chunktypecounts):
2689 ui.write(fmtchunktype(chunktype))
2689 ui.write(fmtchunktype(chunktype))
2690 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2690 ui.write(fmt % pcfmt(chunktypesizes[chunktype], totalsize))
2691
2691
2692 ui.write('\n')
2692 ui.write('\n')
2693 fmt = dfmtstr(max(avgchainlen, compratio))
2693 fmt = dfmtstr(max(avgchainlen, compratio))
2694 ui.write(('avg chain length : ') + fmt % avgchainlen)
2694 ui.write(('avg chain length : ') + fmt % avgchainlen)
2695 ui.write(('max chain length : ') + fmt % maxchainlen)
2695 ui.write(('max chain length : ') + fmt % maxchainlen)
2696 ui.write(('compression ratio : ') + fmt % compratio)
2696 ui.write(('compression ratio : ') + fmt % compratio)
2697
2697
2698 if format > 0:
2698 if format > 0:
2699 ui.write('\n')
2699 ui.write('\n')
2700 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2700 ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n')
2701 % tuple(datasize))
2701 % tuple(datasize))
2702 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2702 ui.write(('full revision size (min/max/avg) : %d / %d / %d\n')
2703 % tuple(fullsize))
2703 % tuple(fullsize))
2704 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2704 ui.write(('delta size (min/max/avg) : %d / %d / %d\n')
2705 % tuple(deltasize))
2705 % tuple(deltasize))
2706
2706
2707 if numdeltas > 0:
2707 if numdeltas > 0:
2708 ui.write('\n')
2708 ui.write('\n')
2709 fmt = pcfmtstr(numdeltas)
2709 fmt = pcfmtstr(numdeltas)
2710 fmt2 = pcfmtstr(numdeltas, 4)
2710 fmt2 = pcfmtstr(numdeltas, 4)
2711 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2711 ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas))
2712 if numprev > 0:
2712 if numprev > 0:
2713 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2713 ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev,
2714 numprev))
2714 numprev))
2715 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2715 ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev,
2716 numprev))
2716 numprev))
2717 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2717 ui.write((' other : ') + fmt2 % pcfmt(numoprev,
2718 numprev))
2718 numprev))
2719 if gdelta:
2719 if gdelta:
2720 ui.write(('deltas against p1 : ')
2720 ui.write(('deltas against p1 : ')
2721 + fmt % pcfmt(nump1, numdeltas))
2721 + fmt % pcfmt(nump1, numdeltas))
2722 ui.write(('deltas against p2 : ')
2722 ui.write(('deltas against p2 : ')
2723 + fmt % pcfmt(nump2, numdeltas))
2723 + fmt % pcfmt(nump2, numdeltas))
2724 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2724 ui.write(('deltas against other : ') + fmt % pcfmt(numother,
2725 numdeltas))
2725 numdeltas))
2726
2726
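# Illustrative sketch (not part of this module; the helper is hypothetical):
# every "N (P%)" column printed above comes from the same percentage helper;
# with explicit float division it reduces to:
def _demo_pcfmt(value, total):
    if total:
        return (value, 100 * float(value) / total)
    return (value, 100.0)

# _demo_pcfmt(25, 200) == (25, 12.5)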
2727 @command('debugrevspec',
2727 @command('debugrevspec',
2728 [('', 'optimize', None,
2728 [('', 'optimize', None,
2729 _('print parsed tree after optimizing (DEPRECATED)')),
2729 _('print parsed tree after optimizing (DEPRECATED)')),
2730 ('p', 'show-stage', [],
2730 ('p', 'show-stage', [],
2731 _('print parsed tree at the given stage'), _('NAME')),
2731 _('print parsed tree at the given stage'), _('NAME')),
2732 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2732 ('', 'no-optimized', False, _('evaluate tree without optimization')),
2733 ('', 'verify-optimized', False, _('verify optimized result')),
2733 ('', 'verify-optimized', False, _('verify optimized result')),
2734 ],
2734 ],
2735 ('REVSPEC'))
2735 ('REVSPEC'))
2736 def debugrevspec(ui, repo, expr, **opts):
2736 def debugrevspec(ui, repo, expr, **opts):
2737 """parse and apply a revision specification
2737 """parse and apply a revision specification
2738
2738
2739 Use -p/--show-stage option to print the parsed tree at the given stages.
2739 Use -p/--show-stage option to print the parsed tree at the given stages.
2740 Use -p all to print tree at every stage.
2740 Use -p all to print tree at every stage.
2741
2741
2742 Use --verify-optimized to compare the optimized result with the unoptimized
2742 Use --verify-optimized to compare the optimized result with the unoptimized
2743 one. Returns 1 if the optimized result differs.
2743 one. Returns 1 if the optimized result differs.
2744 """
2744 """
2745 stages = [
2745 stages = [
2746 ('parsed', lambda tree: tree),
2746 ('parsed', lambda tree: tree),
2747 ('expanded', lambda tree: revset.expandaliases(ui, tree)),
2747 ('expanded', lambda tree: revset.expandaliases(ui, tree)),
2748 ('concatenated', revset.foldconcat),
2748 ('concatenated', revset.foldconcat),
2749 ('analyzed', revset.analyze),
2749 ('analyzed', revset.analyze),
2750 ('optimized', revset.optimize),
2750 ('optimized', revset.optimize),
2751 ]
2751 ]
2752 if opts['no_optimized']:
2752 if opts['no_optimized']:
2753 stages = stages[:-1]
2753 stages = stages[:-1]
2754 if opts['verify_optimized'] and opts['no_optimized']:
2754 if opts['verify_optimized'] and opts['no_optimized']:
2755 raise error.Abort(_('cannot use --verify-optimized with '
2755 raise error.Abort(_('cannot use --verify-optimized with '
2756 '--no-optimized'))
2756 '--no-optimized'))
2757 stagenames = set(n for n, f in stages)
2757 stagenames = set(n for n, f in stages)
2758
2758
2759 showalways = set()
2759 showalways = set()
2760 showchanged = set()
2760 showchanged = set()
2761 if ui.verbose and not opts['show_stage']:
2761 if ui.verbose and not opts['show_stage']:
2762 # show parsed tree by --verbose (deprecated)
2762 # show parsed tree by --verbose (deprecated)
2763 showalways.add('parsed')
2763 showalways.add('parsed')
2764 showchanged.update(['expanded', 'concatenated'])
2764 showchanged.update(['expanded', 'concatenated'])
2765 if opts['optimize']:
2765 if opts['optimize']:
2766 showalways.add('optimized')
2766 showalways.add('optimized')
2767 if opts['show_stage'] and opts['optimize']:
2767 if opts['show_stage'] and opts['optimize']:
2768 raise error.Abort(_('cannot use --optimize with --show-stage'))
2768 raise error.Abort(_('cannot use --optimize with --show-stage'))
2769 if opts['show_stage'] == ['all']:
2769 if opts['show_stage'] == ['all']:
2770 showalways.update(stagenames)
2770 showalways.update(stagenames)
2771 else:
2771 else:
2772 for n in opts['show_stage']:
2772 for n in opts['show_stage']:
2773 if n not in stagenames:
2773 if n not in stagenames:
2774 raise error.Abort(_('invalid stage name: %s') % n)
2774 raise error.Abort(_('invalid stage name: %s') % n)
2775 showalways.update(opts['show_stage'])
2775 showalways.update(opts['show_stage'])
2776
2776
2777 treebystage = {}
2777 treebystage = {}
2778 printedtree = None
2778 printedtree = None
2779 tree = revset.parse(expr, lookup=repo.__contains__)
2779 tree = revset.parse(expr, lookup=repo.__contains__)
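    # Thread the parsed tree through each stage in order; print the
    # intermediate tree whenever a stage was requested explicitly or its
    # output differs from the last tree printed.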
    for n, f in stages:
        treebystage[n] = tree = f(tree)
        if n in showalways or (n in showchanged and tree != printedtree):
            if opts['show_stage'] or n != 'parsed':
                ui.write(("* %s:\n") % n)
            ui.write(revset.prettyformat(tree), "\n")
            printedtree = tree

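    # With --verify-optimized, evaluate both the analyzed and the optimized
    # trees and, if the resulting revision sequences differ, print a
    # unified-diff style comparison and return 1.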
    if opts['verify_optimized']:
        arevs = revset.makematcher(treebystage['analyzed'])(repo)
        brevs = revset.makematcher(treebystage['optimized'])(repo)
        if ui.verbose:
            ui.note(("* analyzed set:\n"), revset.prettyformatset(arevs), "\n")
            ui.note(("* optimized set:\n"), revset.prettyformatset(brevs), "\n")
        arevs = list(arevs)
        brevs = list(brevs)
        if arevs == brevs:
            return 0
        ui.write(('--- analyzed\n'), label='diff.file_a')
        ui.write(('+++ optimized\n'), label='diff.file_b')
        sm = difflib.SequenceMatcher(None, arevs, brevs)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag in ('delete', 'replace'):
                for c in arevs[alo:ahi]:
                    ui.write('-%s\n' % c, label='diff.deleted')
            if tag in ('insert', 'replace'):
                for c in brevs[blo:bhi]:
                    ui.write('+%s\n' % c, label='diff.inserted')
            if tag == 'equal':
                for c in arevs[alo:ahi]:
                    ui.write(' %s\n' % c)
        return 1

    func = revset.makematcher(tree)
    revs = func(repo)
    if ui.verbose:
        ui.note(("* set:\n"), revset.prettyformatset(revs), "\n")
    for c in revs:
        ui.write("%s\n" % c)

@command('debugsetparents', [], _('REV1 [REV2]'))
def debugsetparents(ui, repo, rev1, rev2=None):
    """manually set the parents of the current working directory

    This is useful for writing repository conversion tools, but should
    be used with care. For example, neither the working directory nor the
    dirstate is updated, so file status may be incorrect after running this
    command.

    Returns 0 on success.
    """

    r1 = scmutil.revsingle(repo, rev1).node()
    r2 = scmutil.revsingle(repo, rev2, 'null').node()

    with repo.wlock():
        repo.setparents(r1, r2)

@command('debugdirstate|debugstate',
    [('', 'nodates', None, _('do not display the saved mtime')),
     ('', 'datesort', None, _('sort by saved mtime'))],
    _('[OPTION]...'))
def debugstate(ui, repo, **opts):
    """show the contents of the current dirstate"""

    nodates = opts.get('nodates')
    datesort = opts.get('datesort')

    timestr = ""
    if datesort:
        keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename
    else:
        keyfunc = None # sort by filename
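    # Each dirstate entry maps a filename to a (state, mode, size, mtime)
    # tuple; state is a single character ('n'ormal, 'a'dded, 'r'emoved,
    # 'm'erged) and an mtime of -1 means the timestamp is unknown ("unset").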
    for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc):
        if ent[3] == -1:
            timestr = 'unset '
        elif nodates:
            timestr = 'set '
        else:
            timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
                                    time.localtime(ent[3]))
        if ent[1] & 0o20000:
            mode = 'lnk'
        else:
            mode = '%3o' % (ent[1] & 0o777 & ~util.umask)
        ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
    for f in repo.dirstate.copies():
        ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))

@command('debugsub',
    [('r', 'rev', '',
      _('revision to check'), _('REV'))],
    _('[-r REV] [REV]'))
def debugsub(ui, repo, rev=None):
    ctx = scmutil.revsingle(repo, rev, None)
    for k, v in sorted(ctx.substate.items()):
        ui.write(('path %s\n') % k)
        ui.write((' source %s\n') % v[0])
        ui.write((' revision %s\n') % v[1])

@command('debugsuccessorssets',
    [],
    _('[REV]'))
def debugsuccessorssets(ui, repo, *revs):
    """show set of successors for revision

    A successors set of changeset A is a consistent group of revisions that
    succeed A. It contains non-obsolete changesets only.

    In most cases a changeset A has a single successors set containing a single
    successor (changeset A replaced by A').

    A changeset that is made obsolete with no successors is called "pruned".
    Such changesets have no successors sets at all.

    A changeset that has been "split" will have a successors set containing
    more than one successor.

    A changeset that has been rewritten in multiple different ways is called
    "divergent". Such changesets have multiple successor sets (each of which
    may also be split, i.e. have multiple successors).

    Results are displayed as follows::

        <rev1>
            <successors-1A>
        <rev2>
            <successors-2A>
            <successors-2B1> <successors-2B2> <successors-2B3>

    Here rev2 has two possible (i.e. divergent) successors sets. The first
    holds one element, whereas the second holds three (i.e. the changeset has
    been split).
    """
    # passed to successorssets caching computation from one call to another
    cache = {}
    ctx2str = str
    node2str = short
    if ui.debug():
        def ctx2str(ctx):
            return ctx.hex()
        node2str = hex
    for rev in scmutil.revrange(repo, revs):
        ctx = repo[rev]
        ui.write('%s\n' % ctx2str(ctx))
        for succsset in obsolete.successorssets(repo, ctx.node(), cache):
            if succsset:
                ui.write(' ')
                ui.write(node2str(succsset[0]))
                for node in succsset[1:]:
                    ui.write(' ')
                    ui.write(node2str(node))
            ui.write('\n')

@command('debugtemplate',
    [('r', 'rev', [], _('apply template on changesets'), _('REV')),
     ('D', 'define', [], _('define template keyword'), _('KEY=VALUE'))],
    _('[-r REV]... [-D KEY=VALUE]... TEMPLATE'),
    optionalrepo=True)
def debugtemplate(ui, repo, tmpl, **opts):
    """parse and apply a template

    If -r/--rev is given, the template is processed as a log template and
    applied to the given changesets. Otherwise, it is processed as a generic
    template.

    Use --verbose to print the parsed tree.
    """
    revs = None
    if opts['rev']:
        if repo is None:
            raise error.RepoError(_('there is no Mercurial repository here '
                                    '(.hg not found)'))
        revs = scmutil.revrange(repo, opts['rev'])

    props = {}
    for d in opts['define']:
        try:
            k, v = (e.strip() for e in d.split('=', 1))
            if not k:
                raise ValueError
            props[k] = v
        except ValueError:
            raise error.Abort(_('malformed keyword definition: %s') % d)

    if ui.verbose:
        aliases = ui.configitems('templatealias')
        tree = templater.parse(tmpl)
        ui.note(templater.prettyformat(tree), '\n')
        newtree = templater.expandaliases(tree, aliases)
        if newtree != tree:
            ui.note(("* expanded:\n"), templater.prettyformat(newtree), '\n')

    mapfile = None
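    # Without --rev the template is rendered once as a plain (generic)
    # template; otherwise it is treated as a log template and applied to each
    # of the requested changesets via a changeset templater.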
    if revs is None:
        k = 'debugtemplate'
        t = formatter.maketemplater(ui, k, tmpl)
        ui.write(templater.stringify(t(k, **props)))
    else:
        displayer = cmdutil.changeset_templater(ui, repo, None, opts, tmpl,
                                                mapfile, buffered=False)
        for r in revs:
            displayer.show(repo[r], **props)
        displayer.close()

@command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True)
def debugwalk(ui, repo, *pats, **opts):
    """show how files match on given patterns"""
    m = scmutil.match(repo[None], pats, opts)
    items = list(repo.walk(m))
    if not items:
        return
    f = lambda fn: fn
    if ui.configbool('ui', 'slash') and pycompat.ossep != '/':
        f = lambda fn: util.normpath(fn)
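    # One output line per matched file: 'f', the repo-relative path, the
    # working-directory-relative path, and 'exact' when the file was named
    # explicitly rather than picked up by a pattern.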
    fmt = 'f %%-%ds %%-%ds %%s' % (
        max([len(abs) for abs in items]),
        max([len(m.rel(abs)) for abs in items]))
    for abs in items:
        line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
        ui.write("%s\n" % line.rstrip())

@command('debugwireargs',
    [('', 'three', '', 'three'),
     ('', 'four', '', 'four'),
     ('', 'five', '', 'five'),
    ] + remoteopts,
    _('REPO [OPTIONS]... [ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    repo = hg.peer(ui, opts, repopath)
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
     ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353 # compare against first parent
          hg diff -r 9353^:9353 # same using revset syntax
          hg diff -r 9353^2:9353 # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))

@command('^export',
    [('o', 'output', '',
      _('print output to file with formatted name'), _('FORMAT')),
     ('', 'switch-parent', None, _('diff against the second parent')),
     ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    .. note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                   switch_parent=opts.get('switch_parent'),
                   opts=patch.diffallopts(ui, opts))

@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
     ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision for given files (excluding removed files).
    Files can be specified as filenames or filesets.

    If no files are given to match, this command prints the names
    of all files under Mercurial control.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - show sizes and flags for the current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    with ui.formatter('files', opts) as fm:
        return cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

@command('^forget', walkopts, _('[OPTION]... FILE...'), inferrepo=True)
def forget(ui, repo, *pats, **opts):
    """forget the specified files on the next commit

    Mark the specified files so they will no longer be tracked
    after the next commit.

    This only removes files from the current branch, not from the
    entire project history, and it does not delete them from the
    working directory.

    To delete the file from the working directory, see :hg:`remove`.

    To undo a forget before the next commit, see :hg:`add`.

    .. container:: verbose

      Examples:

      - forget newly-added binary files::

          hg forget "set:added() and binary()"

      - forget files that would be excluded by .hgignore::

          hg forget "set:hgignore()"

    Returns 0 on success.
    """

    if not pats:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
    return rejected and 1 or 0

@command(
    'graft',
    [('r', 'rev', [], _('revisions to graft'), _('REV')),
     ('c', 'continue', False, _('resume interrupted graft')),
     ('e', 'edit', False, _('invoke editor on commit messages')),
     ('', 'log', None, _('append graft info to log message')),
     ('f', 'force', False, _('force graft')),
     ('D', 'currentdate', False,
      _('record the current date as commit date')),
     ('U', 'currentuser', False,
      _('record the current user as committer'), _('DATE'))]
    + commitopts2 + mergetoolopts + dryrunopts,
    _('[OPTION]... [-r REV]... REV...'))
def graft(ui, repo, *revs, **opts):
    '''copy changes from other branches onto the current branch

    This command uses Mercurial's merge logic to copy individual
    changes from other branches without merging branches in the
    history graph. This is sometimes known as 'backporting' or
    'cherry-picking'. By default, graft will copy user, date, and
    description from the source changesets.

    Changesets that are ancestors of the current revision, that have
    already been grafted, or that are merges will be skipped.

    If --log is specified, log messages will have a comment appended
    of the form::

      (grafted from CHANGESETHASH)

    If --force is specified, revisions will be grafted even if they
    are already ancestors of or have been grafted to the destination.
    This is useful when the revisions have since been backed out.

    If a graft merge results in conflicts, the graft process is
    interrupted so that the current merge can be manually resolved.
    Once all conflicts are addressed, the graft process can be
    continued with the -c/--continue option.

    .. note::

       The -c/--continue option does not reapply earlier options, except
       for --force.

    .. container:: verbose

      Examples:

      - copy a single change to the stable branch and edit its description::

          hg update stable
          hg graft --edit 9393

      - graft a range of changesets with one exception, updating dates::

          hg graft -D "2085::2093 and not 2091"

      - continue a graft after resolving conflicts::

          hg graft -c

      - show the source of a grafted changeset::

          hg log --debug -r .

      - show revisions sorted by date::

          hg log -r "sort(all(), date)"

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying revisions.

    Returns 0 on successful completion.
    '''
    with repo.wlock():
        return _dograft(ui, repo, *revs, **opts)

def _dograft(ui, repo, *revs, **opts):
    if revs and opts.get('rev'):
        ui.warn(_('warning: inconsistent use of --rev might give unexpected '
                  'revision ordering!\n'))

    revs = list(revs)
    revs.extend(opts.get('rev'))

    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

    editor = cmdutil.getcommiteditor(editform='graft', **opts)

    cont = False
    if opts.get('continue'):
        cont = True
        if revs:
            raise error.Abort(_("can't specify --continue and revisions"))
        # read in unfinished revisions
        try:
            nodes = repo.vfs.read('graftstate').splitlines()
            revs = [repo[node].rev() for node in nodes]
        except IOError as inst:
            if inst.errno != errno.ENOENT:
                raise
            cmdutil.wrongtooltocontinue(repo, _('graft'))
    else:
        cmdutil.checkunfinished(repo)
        cmdutil.bailifchanged(repo)
        if not revs:
            raise error.Abort(_('no revisions specified'))
        revs = scmutil.revrange(repo, revs)

    skipped = set()
    # check for merges
    for rev in repo.revs('%ld and merge()', revs):
        ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
        skipped.add(rev)
    revs = [r for r in revs if r not in skipped]
    if not revs:
        return -1

    # Don't check in the --continue case, in effect retaining --force across
    # --continues. That's because without --force, any revisions we decided to
    # skip would have been filtered out here, so they wouldn't have made their
    # way to the graftstate. With --force, any revisions we would have otherwise
    # skipped would not have been filtered out, and if they hadn't been applied
    # already, they'd have been in the graftstate.
    if not (cont or opts.get('force')):
        # check for ancestors of dest branch
        crev = repo['.'].rev()
        ancestors = repo.changelog.ancestors([crev], inclusive=True)
        # XXX make this lazy in the future
        # don't mutate while iterating, create a copy
        for rev in list(revs):
            if rev in ancestors:
                ui.warn(_('skipping ancestor revision %d:%s\n') %
                        (rev, repo[rev]))
                # XXX remove on list is slow
                revs.remove(rev)
        if not revs:
            return -1

    # analyze revs for earlier grafts
    ids = {}
    for ctx in repo.set("%ld", revs):
        ids[ctx.hex()] = ctx.rev()
        n = ctx.extra().get('source')
        if n:
            ids[n] = ctx.rev()

    # check ancestors for earlier grafts
    ui.debug('scanning for duplicate grafts\n')

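    # Walk the changesets that are in the destination's history but not in
    # the graft set; any whose 'source' extra points back at a revision we
    # are about to graft has already been grafted and is dropped from revs.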
    for rev in repo.changelog.findmissingrevs(revs, [crev]):
        ctx = repo[rev]
        n = ctx.extra().get('source')
        if n in ids:
            try:
                r = repo[n].rev()
            except error.RepoLookupError:
                r = None
            if r in revs:
                ui.warn(_('skipping revision %d:%s '
                          '(already grafted to %d:%s)\n')
                        % (r, repo[r], rev, ctx))
                revs.remove(r)
            elif ids[n] in revs:
                if r is None:
                    ui.warn(_('skipping already grafted revision %d:%s '
                              '(%d:%s also has unknown origin %s)\n')
                            % (ids[n], repo[ids[n]], rev, ctx, n[:12]))
                else:
                    ui.warn(_('skipping already grafted revision %d:%s '
                              '(%d:%s also has origin %d:%s)\n')
                            % (ids[n], repo[ids[n]], rev, ctx, r, n[:12]))
                revs.remove(ids[n])
        elif ctx.hex() in ids:
            r = ids[ctx.hex()]
            ui.warn(_('skipping already grafted revision %d:%s '
                      '(was grafted from %d:%s)\n') %
                    (r, repo[r], rev, ctx))
            revs.remove(r)
    if not revs:
        return -1

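    # Main graft loop: each remaining revision is merged onto the working
    # directory using its first parent as the merge ancestor, then committed
    # with a 'source' extra recording the changeset it was grafted from.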
    for pos, ctx in enumerate(repo.set("%ld", revs)):
        desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                               ctx.description().split('\n', 1)[0])
        names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
        if names:
            desc += ' (%s)' % ' '.join(names)
        ui.status(_('grafting %s\n') % desc)
        if opts.get('dry_run'):
            continue

        source = ctx.extra().get('source')
        extra = {}
        if source:
            extra['source'] = source
            extra['intermediate-source'] = ctx.hex()
        else:
            extra['source'] = ctx.hex()
        user = ctx.user()
        if opts.get('user'):
            user = opts['user']
        date = ctx.date()
        if opts.get('date'):
            date = opts['date']
        message = ctx.description()
        if opts.get('log'):
            message += '\n(grafted from %s)' % ctx.hex()

        # we don't merge the first commit when continuing
        if not cont:
            # perform the graft merge with p1(rev) as 'ancestor'
            try:
                # ui.forcemerge is an internal variable, do not document
                repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                  'graft')
                stats = mergemod.graft(repo, ctx, ctx.p1(),
                                       ['local', 'graft'])
            finally:
                repo.ui.setconfig('ui', 'forcemerge', '', 'graft')
            # report any conflicts
            if stats and stats[3] > 0:
                # write out state for --continue
                nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
                repo.vfs.write('graftstate', ''.join(nodelines))
                extra = ''
                if opts.get('user'):
                    extra += ' --user %s' % util.shellquote(opts['user'])
                if opts.get('date'):
                    extra += ' --date %s' % util.shellquote(opts['date'])
                if opts.get('log'):
                    extra += ' --log'
                hint = _("use 'hg resolve' and 'hg graft --continue%s'") % extra
                raise error.Abort(
                    _("unresolved conflicts, can't continue"),
                    hint=hint)
        else:
            cont = False

        # commit
        node = repo.commit(text=message, user=user,
                           date=date, extra=extra, editor=editor)
        if node is None:
            ui.warn(
                _('note: graft of %d:%s created no changes to commit\n') %
                (ctx.rev(), ctx))

    # remove state when we complete successfully
    if not opts.get('dry_run'):
        util.unlinkpath(repo.join('graftstate'), ignoremissing=True)

    return 0

@command('grep',
    [('0', 'print0', None, _('end fields with NUL')),
     ('', 'all', None, _('print all revisions that match')),
     ('a', 'text', None, _('treat all files as text')),
     ('f', 'follow', None,
      _('follow changeset history,'
        ' or file history across copies and renames')),
     ('i', 'ignore-case', None, _('ignore case when matching')),
     ('l', 'files-with-matches', None,
      _('print only filenames and revisions that match')),
     ('n', 'line-number', None, _('print matching line numbers')),
     ('r', 'rev', [],
      _('only search files changed within revision range'), _('REV')),
     ('u', 'user', None, _('list the author (long with -v)')),
     ('d', 'date', None, _('list the date (short with -q)')),
    ] + formatteropts + walkopts,
    _('[OPTION]... PATTERN [FILE]...'),
    inferrepo=True)
def grep(ui, repo, pattern, *pats, **opts):
    """search revision history for a pattern in specified files

    Search revision history for a regular expression in the specified
    files or the entire project.

    By default, grep prints the most recent revision number for each
    file in which it finds a match. To get it to print every revision
    that contains a change in match status ("-" for a match that becomes
    a non-match, or "+" for a non-match that becomes a match), use the
    --all flag.

    PATTERN can be any Python (roughly Perl-compatible) regular
    expression.

    If no FILEs are specified (and -f/--follow isn't set), all files in
    the repository are searched, including those that don't exist in the
    current branch or have been deleted in a prior changeset.

    Returns 0 if a match is found, 1 otherwise.
    """
    reflags = re.M
    if opts.get('ignore_case'):
        reflags |= re.I
    try:
        regexp = util.re.compile(pattern, reflags)
    except re.error as inst:
        ui.warn(_("grep: invalid match pattern: %s\n") % inst)
        return 1
    sep, eol = ':', '\n'
    if opts.get('print0'):
        sep = eol = '\0'

    getfile = util.lrucachefunc(repo.file)

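    # matchlines() scans a file body and yields one tuple per regexp match:
    # (1-based line number, match start column, match end column, line text).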
3598 def matchlines(body):
3598 def matchlines(body):
3599 begin = 0
3599 begin = 0
3600 linenum = 0
3600 linenum = 0
3601 while begin < len(body):
3601 while begin < len(body):
3602 match = regexp.search(body, begin)
3602 match = regexp.search(body, begin)
3603 if not match:
3603 if not match:
3604 break
3604 break
3605 mstart, mend = match.span()
3605 mstart, mend = match.span()
3606 linenum += body.count('\n', begin, mstart) + 1
3606 linenum += body.count('\n', begin, mstart) + 1
3607 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3607 lstart = body.rfind('\n', begin, mstart) + 1 or begin
3608 begin = body.find('\n', mend) + 1 or len(body) + 1
3608 begin = body.find('\n', mend) + 1 or len(body) + 1
3609 lend = begin - 1
3609 lend = begin - 1
3610 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3610 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
3611
3611
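# --- Editor's illustration (not part of commands.py) ----------------------
# A minimal, self-contained sketch of how the matchlines() generator above
# scans a file body: it yields 1-based line numbers plus the match's column
# span within that line. The helper name and the sample body are made up.
import re

def _matchlines(regexp, body):
    begin = linenum = 0
    while begin < len(body):
        match = regexp.search(body, begin)
        if not match:
            break
        mstart, mend = match.span()
        linenum += body.count('\n', begin, mstart) + 1
        lstart = body.rfind('\n', begin, mstart) + 1 or begin
        begin = body.find('\n', mend) + 1 or len(body) + 1
        yield linenum, mstart - lstart, mend - lstart, body[lstart:begin - 1]

body = "alpha\nbeta gamma\ngamma delta\n"
print(list(_matchlines(re.compile('gamma'), body)))
# [(2, 5, 10, 'beta gamma'), (3, 0, 5, 'gamma delta')]
# ---------------------------------------------------------------------------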
3612 class linestate(object):
3612 class linestate(object):
3613 def __init__(self, line, linenum, colstart, colend):
3613 def __init__(self, line, linenum, colstart, colend):
3614 self.line = line
3614 self.line = line
3615 self.linenum = linenum
3615 self.linenum = linenum
3616 self.colstart = colstart
3616 self.colstart = colstart
3617 self.colend = colend
3617 self.colend = colend
3618
3618
3619 def __hash__(self):
3619 def __hash__(self):
3620 return hash((self.linenum, self.line))
3620 return hash((self.linenum, self.line))
3621
3621
3622 def __eq__(self, other):
3622 def __eq__(self, other):
3623 return self.line == other.line
3623 return self.line == other.line
3624
3624
3625 def findpos(self):
3625 def findpos(self):
3626 """Iterate all (start, end) indices of matches"""
3626 """Iterate all (start, end) indices of matches"""
3627 yield self.colstart, self.colend
3627 yield self.colstart, self.colend
3628 p = self.colend
3628 p = self.colend
3629 while p < len(self.line):
3629 while p < len(self.line):
3630 m = regexp.search(self.line, p)
3630 m = regexp.search(self.line, p)
3631 if not m:
3631 if not m:
3632 break
3632 break
3633 yield m.span()
3633 yield m.span()
3634 p = m.end()
3634 p = m.end()
3635
3635
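# --- Editor's illustration (hypothetical sample data, not part of commands.py)
# linestate.findpos() first yields the span recorded at construction time and
# then keeps re-searching the same line for any further hits of the pattern.
import re
regexp_demo = re.compile('ab')
line_demo = 'ab cd ab ab'
first = regexp_demo.search(line_demo)
spans = [first.span()]
p = first.end()
while p < len(line_demo):
    m = regexp_demo.search(line_demo, p)
    if not m:
        break
    spans.append(m.span())
    p = m.end()
print(spans)  # [(0, 2), (6, 8), (9, 11)]
# ---------------------------------------------------------------------------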
3636 matches = {}
3636 matches = {}
3637 copies = {}
3637 copies = {}
3638 def grepbody(fn, rev, body):
3638 def grepbody(fn, rev, body):
3639 matches[rev].setdefault(fn, [])
3639 matches[rev].setdefault(fn, [])
3640 m = matches[rev][fn]
3640 m = matches[rev][fn]
3641 for lnum, cstart, cend, line in matchlines(body):
3641 for lnum, cstart, cend, line in matchlines(body):
3642 s = linestate(line, lnum, cstart, cend)
3642 s = linestate(line, lnum, cstart, cend)
3643 m.append(s)
3643 m.append(s)
3644
3644
3645 def difflinestates(a, b):
3645 def difflinestates(a, b):
3646 sm = difflib.SequenceMatcher(None, a, b)
3646 sm = difflib.SequenceMatcher(None, a, b)
3647 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3647 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
3648 if tag == 'insert':
3648 if tag == 'insert':
3649 for i in xrange(blo, bhi):
3649 for i in xrange(blo, bhi):
3650 yield ('+', b[i])
3650 yield ('+', b[i])
3651 elif tag == 'delete':
3651 elif tag == 'delete':
3652 for i in xrange(alo, ahi):
3652 for i in xrange(alo, ahi):
3653 yield ('-', a[i])
3653 yield ('-', a[i])
3654 elif tag == 'replace':
3654 elif tag == 'replace':
3655 for i in xrange(alo, ahi):
3655 for i in xrange(alo, ahi):
3656 yield ('-', a[i])
3656 yield ('-', a[i])
3657 for i in xrange(blo, bhi):
3657 for i in xrange(blo, bhi):
3658 yield ('+', b[i])
3658 yield ('+', b[i])
3659
3659
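# --- Editor's illustration (sample lists are made up, not part of commands.py)
# difflinestates() turns SequenceMatcher opcodes into the '-'/'+' change
# markers that --all prints for lines whose match status changed between a
# revision and its parent.
import difflib

def demo_difflinestates(a, b):
    sm = difflib.SequenceMatcher(None, a, b)
    for tag, alo, ahi, blo, bhi in sm.get_opcodes():
        if tag == 'insert':
            for i in range(blo, bhi):
                yield ('+', b[i])
        elif tag == 'delete':
            for i in range(alo, ahi):
                yield ('-', a[i])
        elif tag == 'replace':
            for i in range(alo, ahi):
                yield ('-', a[i])
            for i in range(blo, bhi):
                yield ('+', b[i])

print(list(demo_difflinestates(['old match'], ['old match', 'new match'])))
# [('+', 'new match')]
# ---------------------------------------------------------------------------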
3660 def display(fm, fn, ctx, pstates, states):
3660 def display(fm, fn, ctx, pstates, states):
3661 rev = ctx.rev()
3661 rev = ctx.rev()
3662 if fm.isplain():
3662 if fm.isplain():
3663 formatuser = ui.shortuser
3663 formatuser = ui.shortuser
3664 else:
3664 else:
3665 formatuser = str
3665 formatuser = str
3666 if ui.quiet:
3666 if ui.quiet:
3667 datefmt = '%Y-%m-%d'
3667 datefmt = '%Y-%m-%d'
3668 else:
3668 else:
3669 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
3669 datefmt = '%a %b %d %H:%M:%S %Y %1%2'
3670 found = False
3670 found = False
3671 @util.cachefunc
3671 @util.cachefunc
3672 def binary():
3672 def binary():
3673 flog = getfile(fn)
3673 flog = getfile(fn)
3674 return util.binary(flog.read(ctx.filenode(fn)))
3674 return util.binary(flog.read(ctx.filenode(fn)))
3675
3675
3676 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
3676 fieldnamemap = {'filename': 'file', 'linenumber': 'line_number'}
3677 if opts.get('all'):
3677 if opts.get('all'):
3678 iter = difflinestates(pstates, states)
3678 iter = difflinestates(pstates, states)
3679 else:
3679 else:
3680 iter = [('', l) for l in states]
3680 iter = [('', l) for l in states]
3681 for change, l in iter:
3681 for change, l in iter:
3682 fm.startitem()
3682 fm.startitem()
3683 fm.data(node=fm.hexfunc(ctx.node()))
3683 fm.data(node=fm.hexfunc(ctx.node()))
3684 cols = [
3684 cols = [
3685 ('filename', fn, True),
3685 ('filename', fn, True),
3686 ('rev', rev, True),
3686 ('rev', rev, True),
3687 ('linenumber', l.linenum, opts.get('line_number')),
3687 ('linenumber', l.linenum, opts.get('line_number')),
3688 ]
3688 ]
3689 if opts.get('all'):
3689 if opts.get('all'):
3690 cols.append(('change', change, True))
3690 cols.append(('change', change, True))
3691 cols.extend([
3691 cols.extend([
3692 ('user', formatuser(ctx.user()), opts.get('user')),
3692 ('user', formatuser(ctx.user()), opts.get('user')),
3693 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
3693 ('date', fm.formatdate(ctx.date(), datefmt), opts.get('date')),
3694 ])
3694 ])
3695 lastcol = next(name for name, data, cond in reversed(cols) if cond)
3695 lastcol = next(name for name, data, cond in reversed(cols) if cond)
3696 for name, data, cond in cols:
3696 for name, data, cond in cols:
3697 field = fieldnamemap.get(name, name)
3697 field = fieldnamemap.get(name, name)
3698 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
3698 fm.condwrite(cond, field, '%s', data, label='grep.%s' % name)
3699 if cond and name != lastcol:
3699 if cond and name != lastcol:
3700 fm.plain(sep, label='grep.sep')
3700 fm.plain(sep, label='grep.sep')
3701 if not opts.get('files_with_matches'):
3701 if not opts.get('files_with_matches'):
3702 fm.plain(sep, label='grep.sep')
3702 fm.plain(sep, label='grep.sep')
3703 if not opts.get('text') and binary():
3703 if not opts.get('text') and binary():
3704 fm.plain(_(" Binary file matches"))
3704 fm.plain(_(" Binary file matches"))
3705 else:
3705 else:
3706 displaymatches(fm.nested('texts'), l)
3706 displaymatches(fm.nested('texts'), l)
3707 fm.plain(eol)
3707 fm.plain(eol)
3708 found = True
3708 found = True
3709 if opts.get('files_with_matches'):
3709 if opts.get('files_with_matches'):
3710 break
3710 break
3711 return found
3711 return found
3712
3712
3713 def displaymatches(fm, l):
3713 def displaymatches(fm, l):
3714 p = 0
3714 p = 0
3715 for s, e in l.findpos():
3715 for s, e in l.findpos():
3716 if p < s:
3716 if p < s:
3717 fm.startitem()
3717 fm.startitem()
3718 fm.write('text', '%s', l.line[p:s])
3718 fm.write('text', '%s', l.line[p:s])
3719 fm.data(matched=False)
3719 fm.data(matched=False)
3720 fm.startitem()
3720 fm.startitem()
3721 fm.write('text', '%s', l.line[s:e], label='grep.match')
3721 fm.write('text', '%s', l.line[s:e], label='grep.match')
3722 fm.data(matched=True)
3722 fm.data(matched=True)
3723 p = e
3723 p = e
3724 if p < len(l.line):
3724 if p < len(l.line):
3725 fm.startitem()
3725 fm.startitem()
3726 fm.write('text', '%s', l.line[p:])
3726 fm.write('text', '%s', l.line[p:])
3727 fm.data(matched=False)
3727 fm.data(matched=False)
3728 fm.end()
3728 fm.end()
3729
3729
3730 skip = {}
3730 skip = {}
3731 revfiles = {}
3731 revfiles = {}
3732 matchfn = scmutil.match(repo[None], pats, opts)
3732 matchfn = scmutil.match(repo[None], pats, opts)
3733 found = False
3733 found = False
3734 follow = opts.get('follow')
3734 follow = opts.get('follow')
3735
3735
3736 def prep(ctx, fns):
3736 def prep(ctx, fns):
3737 rev = ctx.rev()
3737 rev = ctx.rev()
3738 pctx = ctx.p1()
3738 pctx = ctx.p1()
3739 parent = pctx.rev()
3739 parent = pctx.rev()
3740 matches.setdefault(rev, {})
3740 matches.setdefault(rev, {})
3741 matches.setdefault(parent, {})
3741 matches.setdefault(parent, {})
3742 files = revfiles.setdefault(rev, [])
3742 files = revfiles.setdefault(rev, [])
3743 for fn in fns:
3743 for fn in fns:
3744 flog = getfile(fn)
3744 flog = getfile(fn)
3745 try:
3745 try:
3746 fnode = ctx.filenode(fn)
3746 fnode = ctx.filenode(fn)
3747 except error.LookupError:
3747 except error.LookupError:
3748 continue
3748 continue
3749
3749
3750 copied = flog.renamed(fnode)
3750 copied = flog.renamed(fnode)
3751 copy = follow and copied and copied[0]
3751 copy = follow and copied and copied[0]
3752 if copy:
3752 if copy:
3753 copies.setdefault(rev, {})[fn] = copy
3753 copies.setdefault(rev, {})[fn] = copy
3754 if fn in skip:
3754 if fn in skip:
3755 if copy:
3755 if copy:
3756 skip[copy] = True
3756 skip[copy] = True
3757 continue
3757 continue
3758 files.append(fn)
3758 files.append(fn)
3759
3759
3760 if fn not in matches[rev]:
3760 if fn not in matches[rev]:
3761 grepbody(fn, rev, flog.read(fnode))
3761 grepbody(fn, rev, flog.read(fnode))
3762
3762
3763 pfn = copy or fn
3763 pfn = copy or fn
3764 if pfn not in matches[parent]:
3764 if pfn not in matches[parent]:
3765 try:
3765 try:
3766 fnode = pctx.filenode(pfn)
3766 fnode = pctx.filenode(pfn)
3767 grepbody(pfn, parent, flog.read(fnode))
3767 grepbody(pfn, parent, flog.read(fnode))
3768 except error.LookupError:
3768 except error.LookupError:
3769 pass
3769 pass
3770
3770
3771 fm = ui.formatter('grep', opts)
3771 fm = ui.formatter('grep', opts)
3772 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3772 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
3773 rev = ctx.rev()
3773 rev = ctx.rev()
3774 parent = ctx.p1().rev()
3774 parent = ctx.p1().rev()
3775 for fn in sorted(revfiles.get(rev, [])):
3775 for fn in sorted(revfiles.get(rev, [])):
3776 states = matches[rev][fn]
3776 states = matches[rev][fn]
3777 copy = copies.get(rev, {}).get(fn)
3777 copy = copies.get(rev, {}).get(fn)
3778 if fn in skip:
3778 if fn in skip:
3779 if copy:
3779 if copy:
3780 skip[copy] = True
3780 skip[copy] = True
3781 continue
3781 continue
3782 pstates = matches.get(parent, {}).get(copy or fn, [])
3782 pstates = matches.get(parent, {}).get(copy or fn, [])
3783 if pstates or states:
3783 if pstates or states:
3784 r = display(fm, fn, ctx, pstates, states)
3784 r = display(fm, fn, ctx, pstates, states)
3785 found = found or r
3785 found = found or r
3786 if r and not opts.get('all'):
3786 if r and not opts.get('all'):
3787 skip[fn] = True
3787 skip[fn] = True
3788 if copy:
3788 if copy:
3789 skip[copy] = True
3789 skip[copy] = True
3790 del matches[rev]
3790 del matches[rev]
3791 del revfiles[rev]
3791 del revfiles[rev]
3792 fm.end()
3792 fm.end()
3793
3793
3794 return not found
3794 return not found
3795
3795
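# --- Editor's note (illustrative only, not part of commands.py) ------------
# grep() returns `not found`, so the command's exit code is 0 when at least
# one match was printed and 1 otherwise, matching the "Returns 0 if a match
# is found, 1 otherwise" docstring. A two-line sanity check of the inversion:
print(int(not True))   # 0 -> a match was found
print(int(not False))  # 1 -> no match was found
# ---------------------------------------------------------------------------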
3796 @command('heads',
3796 @command('heads',
3797 [('r', 'rev', '',
3797 [('r', 'rev', '',
3798 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3798 _('show only heads which are descendants of STARTREV'), _('STARTREV')),
3799 ('t', 'topo', False, _('show topological heads only')),
3799 ('t', 'topo', False, _('show topological heads only')),
3800 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3800 ('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
3801 ('c', 'closed', False, _('show normal and closed branch heads')),
3801 ('c', 'closed', False, _('show normal and closed branch heads')),
3802 ] + templateopts,
3802 ] + templateopts,
3803 _('[-ct] [-r STARTREV] [REV]...'))
3803 _('[-ct] [-r STARTREV] [REV]...'))
3804 def heads(ui, repo, *branchrevs, **opts):
3804 def heads(ui, repo, *branchrevs, **opts):
3805 """show branch heads
3805 """show branch heads
3806
3806
3807 With no arguments, show all open branch heads in the repository.
3807 With no arguments, show all open branch heads in the repository.
3808 Branch heads are changesets that have no descendants on the
3808 Branch heads are changesets that have no descendants on the
3809 same branch. They are where development generally takes place and
3809 same branch. They are where development generally takes place and
3810 are the usual targets for update and merge operations.
3810 are the usual targets for update and merge operations.
3811
3811
3812 If one or more REVs are given, only open branch heads on the
3812 If one or more REVs are given, only open branch heads on the
3813 branches associated with the specified changesets are shown. This
3813 branches associated with the specified changesets are shown. This
3814 means that you can use :hg:`heads .` to see the heads on the
3814 means that you can use :hg:`heads .` to see the heads on the
3815 currently checked-out branch.
3815 currently checked-out branch.
3816
3816
3817 If -c/--closed is specified, also show branch heads marked closed
3817 If -c/--closed is specified, also show branch heads marked closed
3818 (see :hg:`commit --close-branch`).
3818 (see :hg:`commit --close-branch`).
3819
3819
3820 If STARTREV is specified, only those heads that are descendants of
3820 If STARTREV is specified, only those heads that are descendants of
3821 STARTREV will be displayed.
3821 STARTREV will be displayed.
3822
3822
3823 If -t/--topo is specified, named branch mechanics will be ignored and only
3823 If -t/--topo is specified, named branch mechanics will be ignored and only
3824 topological heads (changesets with no children) will be shown.
3824 topological heads (changesets with no children) will be shown.
3825
3825
3826 Returns 0 if matching heads are found, 1 if not.
3826 Returns 0 if matching heads are found, 1 if not.
3827 """
3827 """
3828
3828
3829 start = None
3829 start = None
3830 if 'rev' in opts:
3830 if 'rev' in opts:
3831 start = scmutil.revsingle(repo, opts['rev'], None).node()
3831 start = scmutil.revsingle(repo, opts['rev'], None).node()
3832
3832
3833 if opts.get('topo'):
3833 if opts.get('topo'):
3834 heads = [repo[h] for h in repo.heads(start)]
3834 heads = [repo[h] for h in repo.heads(start)]
3835 else:
3835 else:
3836 heads = []
3836 heads = []
3837 for branch in repo.branchmap():
3837 for branch in repo.branchmap():
3838 heads += repo.branchheads(branch, start, opts.get('closed'))
3838 heads += repo.branchheads(branch, start, opts.get('closed'))
3839 heads = [repo[h] for h in heads]
3839 heads = [repo[h] for h in heads]
3840
3840
3841 if branchrevs:
3841 if branchrevs:
3842 branches = set(repo[br].branch() for br in branchrevs)
3842 branches = set(repo[br].branch() for br in branchrevs)
3843 heads = [h for h in heads if h.branch() in branches]
3843 heads = [h for h in heads if h.branch() in branches]
3844
3844
3845 if opts.get('active') and branchrevs:
3845 if opts.get('active') and branchrevs:
3846 dagheads = repo.heads(start)
3846 dagheads = repo.heads(start)
3847 heads = [h for h in heads if h.node() in dagheads]
3847 heads = [h for h in heads if h.node() in dagheads]
3848
3848
3849 if branchrevs:
3849 if branchrevs:
3850 haveheads = set(h.branch() for h in heads)
3850 haveheads = set(h.branch() for h in heads)
3851 if branches - haveheads:
3851 if branches - haveheads:
3852 headless = ', '.join(b for b in branches - haveheads)
3852 headless = ', '.join(b for b in branches - haveheads)
3853 msg = _('no open branch heads found on branches %s')
3853 msg = _('no open branch heads found on branches %s')
3854 if opts.get('rev'):
3854 if opts.get('rev'):
3855 msg += _(' (started at %s)') % opts['rev']
3855 msg += _(' (started at %s)') % opts['rev']
3856 ui.warn((msg + '\n') % headless)
3856 ui.warn((msg + '\n') % headless)
3857
3857
3858 if not heads:
3858 if not heads:
3859 return 1
3859 return 1
3860
3860
3861 heads = sorted(heads, key=lambda x: -x.rev())
3861 heads = sorted(heads, key=lambda x: -x.rev())
3862 displayer = cmdutil.show_changeset(ui, repo, opts)
3862 displayer = cmdutil.show_changeset(ui, repo, opts)
3863 for ctx in heads:
3863 for ctx in heads:
3864 displayer.show(ctx)
3864 displayer.show(ctx)
3865 displayer.close()
3865 displayer.close()
3866
3866
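# --- Editor's illustration (sample revision numbers are made up) -----------
# The heads listed above are printed newest-first by sorting on the negated
# revision number, i.e. sorted(heads, key=lambda x: -x.rev()).
revs = [3, 10, 7]
print(sorted(revs, key=lambda x: -x))  # [10, 7, 3]
# ---------------------------------------------------------------------------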
3867 @command('help',
3867 @command('help',
3868 [('e', 'extension', None, _('show only help for extensions')),
3868 [('e', 'extension', None, _('show only help for extensions')),
3869 ('c', 'command', None, _('show only help for commands')),
3869 ('c', 'command', None, _('show only help for commands')),
3870 ('k', 'keyword', None, _('show topics matching keyword')),
3870 ('k', 'keyword', None, _('show topics matching keyword')),
3871 ('s', 'system', [], _('show help for specific platform(s)')),
3871 ('s', 'system', [], _('show help for specific platform(s)')),
3872 ],
3872 ],
3873 _('[-ecks] [TOPIC]'),
3873 _('[-ecks] [TOPIC]'),
3874 norepo=True)
3874 norepo=True)
3875 def help_(ui, name=None, **opts):
3875 def help_(ui, name=None, **opts):
3876 """show help for a given topic or a help overview
3876 """show help for a given topic or a help overview
3877
3877
3878 With no arguments, print a list of commands with short help messages.
3878 With no arguments, print a list of commands with short help messages.
3879
3879
3880 Given a topic, extension, or command name, print help for that
3880 Given a topic, extension, or command name, print help for that
3881 topic.
3881 topic.
3882
3882
3883 Returns 0 if successful.
3883 Returns 0 if successful.
3884 """
3884 """
3885
3885
3886 textwidth = ui.configint('ui', 'textwidth', 78)
3886 textwidth = ui.configint('ui', 'textwidth', 78)
3887 termwidth = ui.termwidth() - 2
3887 termwidth = ui.termwidth() - 2
3888 if textwidth <= 0 or termwidth < textwidth:
3888 if textwidth <= 0 or termwidth < textwidth:
3889 textwidth = termwidth
3889 textwidth = termwidth
3890
3890
3891 keep = opts.get('system') or []
3891 keep = opts.get('system') or []
3892 if len(keep) == 0:
3892 if len(keep) == 0:
3893 if pycompat.sysplatform.startswith('win'):
3893 if pycompat.sysplatform.startswith('win'):
3894 keep.append('windows')
3894 keep.append('windows')
3895 elif pycompat.sysplatform == 'OpenVMS':
3895 elif pycompat.sysplatform == 'OpenVMS':
3896 keep.append('vms')
3896 keep.append('vms')
3897 elif pycompat.sysplatform == 'plan9':
3897 elif pycompat.sysplatform == 'plan9':
3898 keep.append('plan9')
3898 keep.append('plan9')
3899 else:
3899 else:
3900 keep.append('unix')
3900 keep.append('unix')
3901 keep.append(pycompat.sysplatform.lower())
3901 keep.append(pycompat.sysplatform.lower())
3902 if ui.verbose:
3902 if ui.verbose:
3903 keep.append('verbose')
3903 keep.append('verbose')
3904
3904
3905 section = None
3905 section = None
3906 subtopic = None
3906 subtopic = None
3907 if name and '.' in name:
3907 if name and '.' in name:
3908 name, remaining = name.split('.', 1)
3908 name, remaining = name.split('.', 1)
3909 remaining = encoding.lower(remaining)
3909 remaining = encoding.lower(remaining)
3910 if '.' in remaining:
3910 if '.' in remaining:
3911 subtopic, section = remaining.split('.', 1)
3911 subtopic, section = remaining.split('.', 1)
3912 else:
3912 else:
3913 if name in help.subtopics:
3913 if name in help.subtopics:
3914 subtopic = remaining
3914 subtopic = remaining
3915 else:
3915 else:
3916 section = remaining
3916 section = remaining
3917
3917
3918 text = help.help_(ui, name, subtopic=subtopic, **opts)
3918 text = help.help_(ui, name, subtopic=subtopic, **opts)
3919
3919
3920 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3920 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3921 section=section)
3921 section=section)
3922
3922
3923 # We could have been given a weird ".foo" section without a name
3923 # We could have been given a weird ".foo" section without a name
3924 # to look for, or we could have simply failed to find "foo.bar"
3924 # to look for, or we could have simply failed to find "foo.bar"
3925 # because bar isn't a section of foo
3925 # because bar isn't a section of foo
3926 if section and not (formatted and name):
3926 if section and not (formatted and name):
3927 raise error.Abort(_("help section not found"))
3927 raise error.Abort(_("help section not found"))
3928
3928
3929 if 'verbose' in pruned:
3929 if 'verbose' in pruned:
3930 keep.append('omitted')
3930 keep.append('omitted')
3931 else:
3931 else:
3932 keep.append('notomitted')
3932 keep.append('notomitted')
3933 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3933 formatted, pruned = minirst.format(text, textwidth, keep=keep,
3934 section=section)
3934 section=section)
3935 ui.write(formatted)
3935 ui.write(formatted)
3936
3936
3937
3937
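# --- Editor's sketch (helper name is hypothetical, not part of commands.py)
# The width used to render help above is the configured ui.textwidth, clamped
# to the terminal width minus two when the config is unset, invalid, or wider
# than the terminal.
def effective_textwidth(configured, terminal):
    textwidth = configured
    termwidth = terminal - 2
    if textwidth <= 0 or termwidth < textwidth:
        textwidth = termwidth
    return textwidth

print(effective_textwidth(78, 120))  # 78
print(effective_textwidth(78, 60))   # 58 (terminal is narrower)
# ---------------------------------------------------------------------------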
3938 @command('identify|id',
3938 @command('identify|id',
3939 [('r', 'rev', '',
3939 [('r', 'rev', '',
3940 _('identify the specified revision'), _('REV')),
3940 _('identify the specified revision'), _('REV')),
3941 ('n', 'num', None, _('show local revision number')),
3941 ('n', 'num', None, _('show local revision number')),
3942 ('i', 'id', None, _('show global revision id')),
3942 ('i', 'id', None, _('show global revision id')),
3943 ('b', 'branch', None, _('show branch')),
3943 ('b', 'branch', None, _('show branch')),
3944 ('t', 'tags', None, _('show tags')),
3944 ('t', 'tags', None, _('show tags')),
3945 ('B', 'bookmarks', None, _('show bookmarks')),
3945 ('B', 'bookmarks', None, _('show bookmarks')),
3946 ] + remoteopts,
3946 ] + remoteopts,
3947 _('[-nibtB] [-r REV] [SOURCE]'),
3947 _('[-nibtB] [-r REV] [SOURCE]'),
3948 optionalrepo=True)
3948 optionalrepo=True)
3949 def identify(ui, repo, source=None, rev=None,
3949 def identify(ui, repo, source=None, rev=None,
3950 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3950 num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
3951 """identify the working directory or specified revision
3951 """identify the working directory or specified revision
3952
3952
3953 Print a summary identifying the repository state at REV using one or
3953 Print a summary identifying the repository state at REV using one or
3954 two parent hash identifiers, followed by a "+" if the working
3954 two parent hash identifiers, followed by a "+" if the working
3955 directory has uncommitted changes, the branch name (if not default),
3955 directory has uncommitted changes, the branch name (if not default),
3956 a list of tags, and a list of bookmarks.
3956 a list of tags, and a list of bookmarks.
3957
3957
3958 When REV is not given, print a summary of the current state of the
3958 When REV is not given, print a summary of the current state of the
3959 repository.
3959 repository.
3960
3960
3961 Specifying a path to a repository root or Mercurial bundle will
3961 Specifying a path to a repository root or Mercurial bundle will
3962 cause lookup to operate on that repository/bundle.
3962 cause lookup to operate on that repository/bundle.
3963
3963
3964 .. container:: verbose
3964 .. container:: verbose
3965
3965
3966 Examples:
3966 Examples:
3967
3967
3968 - generate a build identifier for the working directory::
3968 - generate a build identifier for the working directory::
3969
3969
3970 hg id --id > build-id.dat
3970 hg id --id > build-id.dat
3971
3971
3972 - find the revision corresponding to a tag::
3972 - find the revision corresponding to a tag::
3973
3973
3974 hg id -n -r 1.3
3974 hg id -n -r 1.3
3975
3975
3976 - check the most recent revision of a remote repository::
3976 - check the most recent revision of a remote repository::
3977
3977
3978 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3978 hg id -r tip https://www.mercurial-scm.org/repo/hg/
3979
3979
3980 See :hg:`log` for generating more information about specific revisions,
3980 See :hg:`log` for generating more information about specific revisions,
3981 including full hash identifiers.
3981 including full hash identifiers.
3982
3982
3983 Returns 0 if successful.
3983 Returns 0 if successful.
3984 """
3984 """
3985
3985
3986 if not repo and not source:
3986 if not repo and not source:
3987 raise error.Abort(_("there is no Mercurial repository here "
3987 raise error.Abort(_("there is no Mercurial repository here "
3988 "(.hg not found)"))
3988 "(.hg not found)"))
3989
3989
3990 if ui.debugflag:
3990 if ui.debugflag:
3991 hexfunc = hex
3991 hexfunc = hex
3992 else:
3992 else:
3993 hexfunc = short
3993 hexfunc = short
3994 default = not (num or id or branch or tags or bookmarks)
3994 default = not (num or id or branch or tags or bookmarks)
3995 output = []
3995 output = []
3996 revs = []
3996 revs = []
3997
3997
3998 if source:
3998 if source:
3999 source, branches = hg.parseurl(ui.expandpath(source))
3999 source, branches = hg.parseurl(ui.expandpath(source))
4000 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4000 peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo
4001 repo = peer.local()
4001 repo = peer.local()
4002 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4002 revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
4003
4003
4004 if not repo:
4004 if not repo:
4005 if num or branch or tags:
4005 if num or branch or tags:
4006 raise error.Abort(
4006 raise error.Abort(
4007 _("can't query remote revision number, branch, or tags"))
4007 _("can't query remote revision number, branch, or tags"))
4008 if not rev and revs:
4008 if not rev and revs:
4009 rev = revs[0]
4009 rev = revs[0]
4010 if not rev:
4010 if not rev:
4011 rev = "tip"
4011 rev = "tip"
4012
4012
4013 remoterev = peer.lookup(rev)
4013 remoterev = peer.lookup(rev)
4014 if default or id:
4014 if default or id:
4015 output = [hexfunc(remoterev)]
4015 output = [hexfunc(remoterev)]
4016
4016
4017 def getbms():
4017 def getbms():
4018 bms = []
4018 bms = []
4019
4019
4020 if 'bookmarks' in peer.listkeys('namespaces'):
4020 if 'bookmarks' in peer.listkeys('namespaces'):
4021 hexremoterev = hex(remoterev)
4021 hexremoterev = hex(remoterev)
4022 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4022 bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
4023 if bmr == hexremoterev]
4023 if bmr == hexremoterev]
4024
4024
4025 return sorted(bms)
4025 return sorted(bms)
4026
4026
4027 if bookmarks:
4027 if bookmarks:
4028 output.extend(getbms())
4028 output.extend(getbms())
4029 elif default and not ui.quiet:
4029 elif default and not ui.quiet:
4030 # multiple bookmarks for a single parent separated by '/'
4030 # multiple bookmarks for a single parent separated by '/'
4031 bm = '/'.join(getbms())
4031 bm = '/'.join(getbms())
4032 if bm:
4032 if bm:
4033 output.append(bm)
4033 output.append(bm)
4034 else:
4034 else:
4035 ctx = scmutil.revsingle(repo, rev, None)
4035 ctx = scmutil.revsingle(repo, rev, None)
4036
4036
4037 if ctx.rev() is None:
4037 if ctx.rev() is None:
4038 ctx = repo[None]
4038 ctx = repo[None]
4039 parents = ctx.parents()
4039 parents = ctx.parents()
4040 taglist = []
4040 taglist = []
4041 for p in parents:
4041 for p in parents:
4042 taglist.extend(p.tags())
4042 taglist.extend(p.tags())
4043
4043
4044 changed = ""
4044 changed = ""
4045 if default or id or num:
4045 if default or id or num:
4046 if (any(repo.status())
4046 if (any(repo.status())
4047 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4047 or any(ctx.sub(s).dirty() for s in ctx.substate)):
4048 changed = '+'
4048 changed = '+'
4049 if default or id:
4049 if default or id:
4050 output = ["%s%s" %
4050 output = ["%s%s" %
4051 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4051 ('+'.join([hexfunc(p.node()) for p in parents]), changed)]
4052 if num:
4052 if num:
4053 output.append("%s%s" %
4053 output.append("%s%s" %
4054 ('+'.join([str(p.rev()) for p in parents]), changed))
4054 ('+'.join([str(p.rev()) for p in parents]), changed))
4055 else:
4055 else:
4056 if default or id:
4056 if default or id:
4057 output = [hexfunc(ctx.node())]
4057 output = [hexfunc(ctx.node())]
4058 if num:
4058 if num:
4059 output.append(str(ctx.rev()))
4059 output.append(str(ctx.rev()))
4060 taglist = ctx.tags()
4060 taglist = ctx.tags()
4061
4061
4062 if default and not ui.quiet:
4062 if default and not ui.quiet:
4063 b = ctx.branch()
4063 b = ctx.branch()
4064 if b != 'default':
4064 if b != 'default':
4065 output.append("(%s)" % b)
4065 output.append("(%s)" % b)
4066
4066
4067 # multiple tags for a single parent separated by '/'
4067 # multiple tags for a single parent separated by '/'
4068 t = '/'.join(taglist)
4068 t = '/'.join(taglist)
4069 if t:
4069 if t:
4070 output.append(t)
4070 output.append(t)
4071
4071
4072 # multiple bookmarks for a single parent separated by '/'
4072 # multiple bookmarks for a single parent separated by '/'
4073 bm = '/'.join(ctx.bookmarks())
4073 bm = '/'.join(ctx.bookmarks())
4074 if bm:
4074 if bm:
4075 output.append(bm)
4075 output.append(bm)
4076 else:
4076 else:
4077 if branch:
4077 if branch:
4078 output.append(ctx.branch())
4078 output.append(ctx.branch())
4079
4079
4080 if tags:
4080 if tags:
4081 output.extend(taglist)
4081 output.extend(taglist)
4082
4082
4083 if bookmarks:
4083 if bookmarks:
4084 output.extend(ctx.bookmarks())
4084 output.extend(ctx.bookmarks())
4085
4085
4086 ui.write("%s\n" % ' '.join(output))
4086 ui.write("%s\n" % ' '.join(output))
4087
4087
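# --- Editor's sketch (sample values are made up; not the actual identify()
# code path) ---------------------------------------------------------------
# The default `hg id` line is assembled as above: the hash, plus '+' when the
# working directory is dirty, then "(branch)" when the branch is not
# 'default', then tags and bookmarks each joined by '/'.
def demo_id_line(hexnode, dirty, branch, tags, bookmarks):
    output = ['%s%s' % (hexnode, '+' if dirty else '')]
    if branch != 'default':
        output.append('(%s)' % branch)
    if tags:
        output.append('/'.join(tags))
    if bookmarks:
        output.append('/'.join(bookmarks))
    return ' '.join(output)

print(demo_id_line('0a1b2c3d4e5f', True, 'stable', ['tip'], ['feature-x']))
# 0a1b2c3d4e5f+ (stable) tip feature-x
# ---------------------------------------------------------------------------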
4088 @command('import|patch',
4088 @command('import|patch',
4089 [('p', 'strip', 1,
4089 [('p', 'strip', 1,
4090 _('directory strip option for patch. This has the same '
4090 _('directory strip option for patch. This has the same '
4091 'meaning as the corresponding patch option'), _('NUM')),
4091 'meaning as the corresponding patch option'), _('NUM')),
4092 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4092 ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
4093 ('e', 'edit', False, _('invoke editor on commit messages')),
4093 ('e', 'edit', False, _('invoke editor on commit messages')),
4094 ('f', 'force', None,
4094 ('f', 'force', None,
4095 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4095 _('skip check for outstanding uncommitted changes (DEPRECATED)')),
4096 ('', 'no-commit', None,
4096 ('', 'no-commit', None,
4097 _("don't commit, just update the working directory")),
4097 _("don't commit, just update the working directory")),
4098 ('', 'bypass', None,
4098 ('', 'bypass', None,
4099 _("apply patch without touching the working directory")),
4099 _("apply patch without touching the working directory")),
4100 ('', 'partial', None,
4100 ('', 'partial', None,
4101 _('commit even if some hunks fail')),
4101 _('commit even if some hunks fail')),
4102 ('', 'exact', None,
4102 ('', 'exact', None,
4103 _('abort if patch would apply lossily')),
4103 _('abort if patch would apply lossily')),
4104 ('', 'prefix', '',
4104 ('', 'prefix', '',
4105 _('apply patch to subdirectory'), _('DIR')),
4105 _('apply patch to subdirectory'), _('DIR')),
4106 ('', 'import-branch', None,
4106 ('', 'import-branch', None,
4107 _('use any branch information in patch (implied by --exact)'))] +
4107 _('use any branch information in patch (implied by --exact)'))] +
4108 commitopts + commitopts2 + similarityopts,
4108 commitopts + commitopts2 + similarityopts,
4109 _('[OPTION]... PATCH...'))
4109 _('[OPTION]... PATCH...'))
4110 def import_(ui, repo, patch1=None, *patches, **opts):
4110 def import_(ui, repo, patch1=None, *patches, **opts):
4111 """import an ordered set of patches
4111 """import an ordered set of patches
4112
4112
4113 Import a list of patches and commit them individually (unless
4113 Import a list of patches and commit them individually (unless
4114 --no-commit is specified).
4114 --no-commit is specified).
4115
4115
4116 To read a patch from standard input, use "-" as the patch name. If
4116 To read a patch from standard input, use "-" as the patch name. If
4117 a URL is specified, the patch will be downloaded from there.
4117 a URL is specified, the patch will be downloaded from there.
4118
4118
4119 Import first applies changes to the working directory (unless

4119 Import first applies changes to the working directory (unless
4120 --bypass is specified); the import will abort if there are outstanding
4120 --bypass is specified); the import will abort if there are outstanding
4121 changes.
4121 changes.
4122
4122
4123 Use --bypass to apply and commit patches directly to the
4123 Use --bypass to apply and commit patches directly to the
4124 repository, without affecting the working directory. Without
4124 repository, without affecting the working directory. Without
4125 --exact, patches will be applied on top of the working directory
4125 --exact, patches will be applied on top of the working directory
4126 parent revision.
4126 parent revision.
4127
4127
4128 You can import a patch straight from a mail message. Even patches
4128 You can import a patch straight from a mail message. Even patches
4129 as attachments work (to use the body part, it must have type
4129 as attachments work (to use the body part, it must have type
4130 text/plain or text/x-patch). The From and Subject headers of the email
4130 text/plain or text/x-patch). The From and Subject headers of the email
4131 message are used as the default committer and commit message. All
4131 message are used as the default committer and commit message. All
4132 text/plain body parts before the first diff are added to the commit
4132 text/plain body parts before the first diff are added to the commit
4133 message.
4133 message.
4134
4134
4135 If the imported patch was generated by :hg:`export`, user and
4135 If the imported patch was generated by :hg:`export`, user and
4136 description from patch override values from message headers and
4136 description from patch override values from message headers and
4137 body. Values given on command line with -m/--message and -u/--user
4137 body. Values given on command line with -m/--message and -u/--user
4138 override these.
4138 override these.
4139
4139
4140 If --exact is specified, import will set the working directory to
4140 If --exact is specified, import will set the working directory to
4141 the parent of each patch before applying it, and will abort if the
4141 the parent of each patch before applying it, and will abort if the
4142 resulting changeset has a different ID than the one recorded in
4142 resulting changeset has a different ID than the one recorded in
4143 the patch. This will guard against various ways that portable
4143 the patch. This will guard against various ways that portable
4144 patch formats and mail systems might fail to transfer Mercurial
4144 patch formats and mail systems might fail to transfer Mercurial
4145 data or metadata. See :hg:`bundle` for lossless transmission.
4145 data or metadata. See :hg:`bundle` for lossless transmission.
4146
4146
4147 Use --partial to ensure a changeset will be created from the patch
4147 Use --partial to ensure a changeset will be created from the patch
4148 even if some hunks fail to apply. Hunks that fail to apply will be
4148 even if some hunks fail to apply. Hunks that fail to apply will be
4149 written to a <target-file>.rej file. Conflicts can then be resolved
4149 written to a <target-file>.rej file. Conflicts can then be resolved
4150 by hand before :hg:`commit --amend` is run to update the created
4150 by hand before :hg:`commit --amend` is run to update the created
4151 changeset. This flag exists to let people import patches that
4151 changeset. This flag exists to let people import patches that
4152 partially apply without losing the associated metadata (author,
4152 partially apply without losing the associated metadata (author,
4153 date, description, ...).
4153 date, description, ...).
4154
4154
4155 .. note::
4155 .. note::
4156
4156
4157 When no hunks apply cleanly, :hg:`import --partial` will create
4157 When no hunks apply cleanly, :hg:`import --partial` will create
4158 an empty changeset, importing only the patch metadata.
4158 an empty changeset, importing only the patch metadata.
4159
4159
4160 With -s/--similarity, hg will attempt to discover renames and
4160 With -s/--similarity, hg will attempt to discover renames and
4161 copies in the patch in the same way as :hg:`addremove`.
4161 copies in the patch in the same way as :hg:`addremove`.
4162
4162
4163 It is possible to use external patch programs to perform the patch
4163 It is possible to use external patch programs to perform the patch
4164 by setting the ``ui.patch`` configuration option. For the default
4164 by setting the ``ui.patch`` configuration option. For the default
4165 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4165 internal tool, the fuzz can also be configured via ``patch.fuzz``.
4166 See :hg:`help config` for more information about configuration
4166 See :hg:`help config` for more information about configuration
4167 files and how to use these options.
4167 files and how to use these options.
4168
4168
4169 See :hg:`help dates` for a list of formats valid for -d/--date.
4169 See :hg:`help dates` for a list of formats valid for -d/--date.
4170
4170
4171 .. container:: verbose
4171 .. container:: verbose
4172
4172
4173 Examples:
4173 Examples:
4174
4174
4175 - import a traditional patch from a website and detect renames::
4175 - import a traditional patch from a website and detect renames::
4176
4176
4177 hg import -s 80 http://example.com/bugfix.patch
4177 hg import -s 80 http://example.com/bugfix.patch
4178
4178
4179 - import a changeset from an hgweb server::
4179 - import a changeset from an hgweb server::
4180
4180
4181 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4181 hg import https://www.mercurial-scm.org/repo/hg/rev/5ca8c111e9aa
4182
4182
4183 - import all the patches in a Unix-style mbox::
4183 - import all the patches in a Unix-style mbox::
4184
4184
4185 hg import incoming-patches.mbox
4185 hg import incoming-patches.mbox
4186
4186
4187 - attempt to exactly restore an exported changeset (not always
4187 - attempt to exactly restore an exported changeset (not always
4188 possible)::
4188 possible)::
4189
4189
4190 hg import --exact proposed-fix.patch
4190 hg import --exact proposed-fix.patch
4191
4191
4192 - use an external tool to apply a patch which is too fuzzy for
4192 - use an external tool to apply a patch which is too fuzzy for
4193 the default internal tool::
4193 the default internal tool::
4194
4194
4195 hg import --config ui.patch="patch --merge" fuzzy.patch
4195 hg import --config ui.patch="patch --merge" fuzzy.patch
4196
4196
4197 - change the default fuzz from 2 to a less strict 7::
4197 - change the default fuzz from 2 to a less strict 7::
4198
4198
4199 hg import --config patch.fuzz=7 fuzz.patch
4199 hg import --config patch.fuzz=7 fuzz.patch
4200
4200
4201 Returns 0 on success, 1 on partial success (see --partial).
4201 Returns 0 on success, 1 on partial success (see --partial).
4202 """
4202 """
4203
4203
4204 if not patch1:
4204 if not patch1:
4205 raise error.Abort(_('need at least one patch to import'))
4205 raise error.Abort(_('need at least one patch to import'))
4206
4206
4207 patches = (patch1,) + patches
4207 patches = (patch1,) + patches
4208
4208
4209 date = opts.get('date')
4209 date = opts.get('date')
4210 if date:
4210 if date:
4211 opts['date'] = util.parsedate(date)
4211 opts['date'] = util.parsedate(date)
4212
4212
4213 exact = opts.get('exact')
4213 exact = opts.get('exact')
4214 update = not opts.get('bypass')
4214 update = not opts.get('bypass')
4215 if not update and opts.get('no_commit'):
4215 if not update and opts.get('no_commit'):
4216 raise error.Abort(_('cannot use --no-commit with --bypass'))
4216 raise error.Abort(_('cannot use --no-commit with --bypass'))
4217 try:
4217 try:
4218 sim = float(opts.get('similarity') or 0)
4218 sim = float(opts.get('similarity') or 0)
4219 except ValueError:
4219 except ValueError:
4220 raise error.Abort(_('similarity must be a number'))
4220 raise error.Abort(_('similarity must be a number'))
4221 if sim < 0 or sim > 100:
4221 if sim < 0 or sim > 100:
4222 raise error.Abort(_('similarity must be between 0 and 100'))
4222 raise error.Abort(_('similarity must be between 0 and 100'))
4223 if sim and not update:
4223 if sim and not update:
4224 raise error.Abort(_('cannot use --similarity with --bypass'))
4224 raise error.Abort(_('cannot use --similarity with --bypass'))
4225 if exact:
4225 if exact:
4226 if opts.get('edit'):
4226 if opts.get('edit'):
4227 raise error.Abort(_('cannot use --exact with --edit'))
4227 raise error.Abort(_('cannot use --exact with --edit'))
4228 if opts.get('prefix'):
4228 if opts.get('prefix'):
4229 raise error.Abort(_('cannot use --exact with --prefix'))
4229 raise error.Abort(_('cannot use --exact with --prefix'))
4230
4230
4231 base = opts["base"]
4231 base = opts["base"]
4232 wlock = dsguard = lock = tr = None
4232 wlock = dsguard = lock = tr = None
4233 msgs = []
4233 msgs = []
4234 ret = 0
4234 ret = 0
4235
4235
4236
4236
4237 try:
4237 try:
4238 wlock = repo.wlock()
4238 wlock = repo.wlock()
4239
4239
4240 if update:
4240 if update:
4241 cmdutil.checkunfinished(repo)
4241 cmdutil.checkunfinished(repo)
4242 if (exact or not opts.get('force')):
4242 if (exact or not opts.get('force')):
4243 cmdutil.bailifchanged(repo)
4243 cmdutil.bailifchanged(repo)
4244
4244
4245 if not opts.get('no_commit'):
4245 if not opts.get('no_commit'):
4246 lock = repo.lock()
4246 lock = repo.lock()
4247 tr = repo.transaction('import')
4247 tr = repo.transaction('import')
4248 else:
4248 else:
4249 dsguard = dirstateguard.dirstateguard(repo, 'import')
4249 dsguard = dirstateguard.dirstateguard(repo, 'import')
4250 parents = repo[None].parents()
4250 parents = repo[None].parents()
4251 for patchurl in patches:
4251 for patchurl in patches:
4252 if patchurl == '-':
4252 if patchurl == '-':
4253 ui.status(_('applying patch from stdin\n'))
4253 ui.status(_('applying patch from stdin\n'))
4254 patchfile = ui.fin
4254 patchfile = ui.fin
4255 patchurl = 'stdin' # for error message
4255 patchurl = 'stdin' # for error message
4256 else:
4256 else:
4257 patchurl = os.path.join(base, patchurl)
4257 patchurl = os.path.join(base, patchurl)
4258 ui.status(_('applying %s\n') % patchurl)
4258 ui.status(_('applying %s\n') % patchurl)
4259 patchfile = hg.openpath(ui, patchurl)
4259 patchfile = hg.openpath(ui, patchurl)
4260
4260
4261 haspatch = False
4261 haspatch = False
4262 for hunk in patch.split(patchfile):
4262 for hunk in patch.split(patchfile):
4263 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4263 (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk,
4264 parents, opts,
4264 parents, opts,
4265 msgs, hg.clean)
4265 msgs, hg.clean)
4266 if msg:
4266 if msg:
4267 haspatch = True
4267 haspatch = True
4268 ui.note(msg + '\n')
4268 ui.note(msg + '\n')
4269 if update or exact:
4269 if update or exact:
4270 parents = repo[None].parents()
4270 parents = repo[None].parents()
4271 else:
4271 else:
4272 parents = [repo[node]]
4272 parents = [repo[node]]
4273 if rej:
4273 if rej:
4274 ui.write_err(_("patch applied partially\n"))
4274 ui.write_err(_("patch applied partially\n"))
4275 ui.write_err(_("(fix the .rej files and run "
4275 ui.write_err(_("(fix the .rej files and run "
4276 "`hg commit --amend`)\n"))
4276 "`hg commit --amend`)\n"))
4277 ret = 1
4277 ret = 1
4278 break
4278 break
4279
4279
4280 if not haspatch:
4280 if not haspatch:
4281 raise error.Abort(_('%s: no diffs found') % patchurl)
4281 raise error.Abort(_('%s: no diffs found') % patchurl)
4282
4282
4283 if tr:
4283 if tr:
4284 tr.close()
4284 tr.close()
4285 if msgs:
4285 if msgs:
4286 repo.savecommitmessage('\n* * *\n'.join(msgs))
4286 repo.savecommitmessage('\n* * *\n'.join(msgs))
4287 if dsguard:
4287 if dsguard:
4288 dsguard.close()
4288 dsguard.close()
4289 return ret
4289 return ret
4290 finally:
4290 finally:
4291 if tr:
4291 if tr:
4292 tr.release()
4292 tr.release()
4293 release(lock, dsguard, wlock)
4293 release(lock, dsguard, wlock)
4294
4294
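# --- Editor's sketch of the --similarity validation performed in import_()
# above (exception type simplified to ValueError; the real code raises
# error.Abort with translated messages) -------------------------------------
def check_similarity(value):
    try:
        sim = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim

print(check_similarity('80'))  # 80.0
print(check_similarity(None))  # 0.0
# ---------------------------------------------------------------------------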
4295 @command('incoming|in',
4295 @command('incoming|in',
4296 [('f', 'force', None,
4296 [('f', 'force', None,
4297 _('run even if remote repository is unrelated')),
4297 _('run even if remote repository is unrelated')),
4298 ('n', 'newest-first', None, _('show newest record first')),
4298 ('n', 'newest-first', None, _('show newest record first')),
4299 ('', 'bundle', '',
4299 ('', 'bundle', '',
4300 _('file to store the bundles into'), _('FILE')),
4300 _('file to store the bundles into'), _('FILE')),
4301 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4301 ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
4302 ('B', 'bookmarks', False, _("compare bookmarks")),
4302 ('B', 'bookmarks', False, _("compare bookmarks")),
4303 ('b', 'branch', [],
4303 ('b', 'branch', [],
4304 _('a specific branch you would like to pull'), _('BRANCH')),
4304 _('a specific branch you would like to pull'), _('BRANCH')),
4305 ] + logopts + remoteopts + subrepoopts,
4305 ] + logopts + remoteopts + subrepoopts,
4306 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4306 _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]'))
4307 def incoming(ui, repo, source="default", **opts):
4307 def incoming(ui, repo, source="default", **opts):
4308 """show new changesets found in source
4308 """show new changesets found in source
4309
4309
4310 Show new changesets found in the specified path/URL or the default
4310 Show new changesets found in the specified path/URL or the default
4311 pull location. These are the changesets that would have been pulled
4311 pull location. These are the changesets that would have been pulled
4312 if a pull was requested at the time you issued this command.
4312 if a pull was requested at the time you issued this command.
4313
4313
4314 See pull for valid source format details.
4314 See pull for valid source format details.
4315
4315
4316 .. container:: verbose
4316 .. container:: verbose
4317
4317
4318 With -B/--bookmarks, the result of bookmark comparison between
4318 With -B/--bookmarks, the result of bookmark comparison between
4319 local and remote repositories is displayed. With -v/--verbose,
4319 local and remote repositories is displayed. With -v/--verbose,
4320 status is also displayed for each bookmark like below::
4320 status is also displayed for each bookmark like below::
4321
4321
4322 BM1 01234567890a added
4322 BM1 01234567890a added
4323 BM2 1234567890ab advanced
4323 BM2 1234567890ab advanced
4324 BM3 234567890abc diverged
4324 BM3 234567890abc diverged
4325 BM4 34567890abcd changed
4325 BM4 34567890abcd changed
4326
4326
4327 The action taken locally when pulling depends on the
4327 The action taken locally when pulling depends on the
4328 status of each bookmark:
4328 status of each bookmark:
4329
4329
4330 :``added``: pull will create it
4330 :``added``: pull will create it
4331 :``advanced``: pull will update it
4331 :``advanced``: pull will update it
4332 :``diverged``: pull will create a divergent bookmark
4332 :``diverged``: pull will create a divergent bookmark
4333 :``changed``: result depends on remote changesets
4333 :``changed``: result depends on remote changesets
4334
4334
4335 From the point of view of pulling behavior, bookmarks
4335 From the point of view of pulling behavior, bookmarks
4336 existing only in the remote repository are treated as ``added``,
4336 existing only in the remote repository are treated as ``added``,
4337 even if they are in fact locally deleted.
4337 even if they are in fact locally deleted.
4338
4338
4339 .. container:: verbose
4339 .. container:: verbose
4340
4340
4341 For remote repositories, using --bundle avoids downloading the
4341 For remote repositories, using --bundle avoids downloading the
4342 changesets twice if the incoming is followed by a pull.
4342 changesets twice if the incoming is followed by a pull.
4343
4343
4344 Examples:
4344 Examples:
4345
4345
4346 - show incoming changes with patches and full description::
4346 - show incoming changes with patches and full description::
4347
4347
4348 hg incoming -vp
4348 hg incoming -vp
4349
4349
4350 - show incoming changes excluding merges, store a bundle::
4350 - show incoming changes excluding merges, store a bundle::
4351
4351
4352 hg in -vpM --bundle incoming.hg
4352 hg in -vpM --bundle incoming.hg
4353 hg pull incoming.hg
4353 hg pull incoming.hg
4354
4354
4355 - briefly list changes inside a bundle::
4355 - briefly list changes inside a bundle::
4356
4356
4357 hg in changes.hg -T "{desc|firstline}\\n"
4357 hg in changes.hg -T "{desc|firstline}\\n"
4358
4358
4359 Returns 0 if there are incoming changes, 1 otherwise.
4359 Returns 0 if there are incoming changes, 1 otherwise.
4360 """
4360 """
4361 if opts.get('graph'):
4361 if opts.get('graph'):
4362 cmdutil.checkunsupportedgraphflags([], opts)
4362 cmdutil.checkunsupportedgraphflags([], opts)
4363 def display(other, chlist, displayer):
4363 def display(other, chlist, displayer):
4364 revdag = cmdutil.graphrevs(other, chlist, opts)
4364 revdag = cmdutil.graphrevs(other, chlist, opts)
4365 cmdutil.displaygraph(ui, repo, revdag, displayer,
4365 cmdutil.displaygraph(ui, repo, revdag, displayer,
4366 graphmod.asciiedges)
4366 graphmod.asciiedges)
4367
4367
4368 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4368 hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
4369 return 0
4369 return 0
4370
4370
4371 if opts.get('bundle') and opts.get('subrepos'):
4371 if opts.get('bundle') and opts.get('subrepos'):
4372 raise error.Abort(_('cannot combine --bundle and --subrepos'))
4372 raise error.Abort(_('cannot combine --bundle and --subrepos'))
4373
4373
4374 if opts.get('bookmarks'):
4374 if opts.get('bookmarks'):
4375 source, branches = hg.parseurl(ui.expandpath(source),
4375 source, branches = hg.parseurl(ui.expandpath(source),
4376 opts.get('branch'))
4376 opts.get('branch'))
4377 other = hg.peer(repo, opts, source)
4377 other = hg.peer(repo, opts, source)
4378 if 'bookmarks' not in other.listkeys('namespaces'):
4378 if 'bookmarks' not in other.listkeys('namespaces'):
4379 ui.warn(_("remote doesn't support bookmarks\n"))
4379 ui.warn(_("remote doesn't support bookmarks\n"))
4380 return 0
4380 return 0
4381 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4381 ui.status(_('comparing with %s\n') % util.hidepassword(source))
4382 return bookmarks.incoming(ui, repo, other)
4382 return bookmarks.incoming(ui, repo, other)
4383
4383
4384 repo._subtoppath = ui.expandpath(source)
4384 repo._subtoppath = ui.expandpath(source)
4385 try:
4385 try:
4386 return hg.incoming(ui, repo, source, opts)
4386 return hg.incoming(ui, repo, source, opts)
4387 finally:
4387 finally:
4388 del repo._subtoppath
4388 del repo._subtoppath
4389
4389
4390
4390
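# --- Editor's sketch (sample namespace dicts are hypothetical) -------------
# Before comparing bookmarks, incoming() above checks that the remote
# advertises the 'bookmarks' pushkey namespace; otherwise it warns and
# returns 0.
def supports_bookmarks(namespaces):
    return 'bookmarks' in namespaces

print(supports_bookmarks({'bookmarks': '', 'phases': ''}))  # True
print(supports_bookmarks({'phases': ''}))                   # False
# ---------------------------------------------------------------------------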
4391 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4391 @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'),
4392 norepo=True)
4392 norepo=True)
4393 def init(ui, dest=".", **opts):
4393 def init(ui, dest=".", **opts):
4394 """create a new repository in the given directory
4394 """create a new repository in the given directory
4395
4395
4396 Initialize a new repository in the given directory. If the given
4396 Initialize a new repository in the given directory. If the given
4397 directory does not exist, it will be created.
4397 directory does not exist, it will be created.
4398
4398
4399 If no directory is given, the current directory is used.
4399 If no directory is given, the current directory is used.
4400
4400
4401 It is possible to specify an ``ssh://`` URL as the destination.
4401 It is possible to specify an ``ssh://`` URL as the destination.
4402 See :hg:`help urls` for more information.
4402 See :hg:`help urls` for more information.
4403
4403
4404 Returns 0 on success.
4404 Returns 0 on success.
4405 """
4405 """
4406 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4406 hg.peer(ui, opts, ui.expandpath(dest), create=True)
4407
4407
4408 @command('locate',
4408 @command('locate',
4409 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4409 [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
4410 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4410 ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
4411 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4411 ('f', 'fullpath', None, _('print complete paths from the filesystem root')),
4412 ] + walkopts,
4412 ] + walkopts,
4413 _('[OPTION]... [PATTERN]...'))
4413 _('[OPTION]... [PATTERN]...'))
4414 def locate(ui, repo, *pats, **opts):
4414 def locate(ui, repo, *pats, **opts):
4415 """locate files matching specific patterns (DEPRECATED)
4415 """locate files matching specific patterns (DEPRECATED)
4416
4416
4417 Print files under Mercurial control in the working directory whose
4417 Print files under Mercurial control in the working directory whose
4418 names match the given patterns.
4418 names match the given patterns.
4419
4419
4420 By default, this command searches all directories in the working
4420 By default, this command searches all directories in the working
4421 directory. To search just the current directory and its
4421 directory. To search just the current directory and its
4422 subdirectories, use "--include .".
4422 subdirectories, use "--include .".
4423
4423
4424 If no patterns are given to match, this command prints the names
4424 If no patterns are given to match, this command prints the names
4425 of all files under Mercurial control in the working directory.
4425 of all files under Mercurial control in the working directory.
4426
4426
4427 If you want to feed the output of this command into the "xargs"
4427 If you want to feed the output of this command into the "xargs"
4428 command, use the -0 option to both this command and "xargs". This
4428 command, use the -0 option to both this command and "xargs". This
4429 will avoid the problem of "xargs" treating single filenames that
4429 will avoid the problem of "xargs" treating single filenames that
4430 contain whitespace as multiple filenames.
4430 contain whitespace as multiple filenames.
4431
4431
4432 See :hg:`help files` for a more versatile command.
4432 See :hg:`help files` for a more versatile command.
4433
4433
4434 Returns 0 if a match is found, 1 otherwise.
4434 Returns 0 if a match is found, 1 otherwise.
4435 """
4435 """
4436 if opts.get('print0'):
4436 if opts.get('print0'):
4437 end = '\0'
4437 end = '\0'
4438 else:
4438 else:
4439 end = '\n'
4439 end = '\n'
4440 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4440 rev = scmutil.revsingle(repo, opts.get('rev'), None).node()
4441
4441
4442 ret = 1
4442 ret = 1
4443 ctx = repo[rev]
4443 ctx = repo[rev]
4444 m = scmutil.match(ctx, pats, opts, default='relglob',
4444 m = scmutil.match(ctx, pats, opts, default='relglob',
4445 badfn=lambda x, y: False)
4445 badfn=lambda x, y: False)
4446
4446
4447 for abs in ctx.matches(m):
4447 for abs in ctx.matches(m):
4448 if opts.get('fullpath'):
4448 if opts.get('fullpath'):
4449 ui.write(repo.wjoin(abs), end)
4449 ui.write(repo.wjoin(abs), end)
4450 else:
4450 else:
4451 ui.write(((pats and m.rel(abs)) or abs), end)
4451 ui.write(((pats and m.rel(abs)) or abs), end)
4452 ret = 0
4452 ret = 0
4453
4453
4454 return ret
4454 return ret
4455
4455
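# --- Editor's illustration of why -0/--print0 pairs with `xargs -0` --------
# Each name is terminated with a NUL byte instead of a newline, so filenames
# that contain whitespace survive the round trip (sample names are made up).
names = ['with space.txt', 'plain.txt']
print(repr('\0'.join(names) + '\0'))
# 'with space.txt\x00plain.txt\x00'
# ---------------------------------------------------------------------------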
@command('^log|history',
    [('f', 'follow', None,
     _('follow changeset history, or file history across copies and renames')),
    ('', 'follow-first', None,
     _('only follow the first parent of merge changesets (DEPRECATED)')),
    ('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
    ('C', 'copies', None, _('show copied files')),
    ('k', 'keyword', [],
     _('do case-insensitive search for a given text'), _('TEXT')),
    ('r', 'rev', [], _('show the specified revision or revset'), _('REV')),
    ('', 'removed', None, _('include revisions where files were removed')),
    ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
    ('u', 'user', [], _('revisions committed by user'), _('USER')),
    ('', 'only-branch', [],
     _('show only changesets within the given named branch (DEPRECATED)'),
     _('BRANCH')),
    ('b', 'branch', [],
     _('show changesets within the given named branch'), _('BRANCH')),
    ('P', 'prune', [],
     _('do not display revision or any of its ancestors'), _('REV')),
    ] + logopts + walkopts,
    _('[OPTION]... [FILE]'),
    inferrepo=True)
def log(ui, repo, *pats, **opts):
    """show revision history of entire repository or files

    Print the revision history of the specified files or the entire
    project.

    If no revision range is specified, the default is ``tip:0`` unless
    --follow is set, in which case the working directory parent is
    used as the starting revision.

    File history is shown without following rename or copy history of
    files. Use -f/--follow with a filename to follow history across
    renames and copies. --follow without a filename will only show
    ancestors or descendants of the starting revision.

    By default this command prints revision number and changeset id,
    tags, non-trivial parents, user, date and time, and a summary for
    each commit. When the -v/--verbose switch is used, the list of
    changed files and full commit message are shown.

    With --graph the revisions are shown as an ASCII art DAG with the most
    recent changeset at the top.
    'o' is a changeset, '@' is a working directory parent, 'x' is obsolete,
    and '+' represents a fork where the changeset from the lines below is a
    parent of the 'o' merge on the same line.

    .. note::

       :hg:`log --patch` may generate unexpected diff output for merge
       changesets, as it will only compare the merge changeset against
       its first parent. Also, only files different from BOTH parents
       will appear in files:.

    .. note::

       For performance reasons, :hg:`log FILE` may omit duplicate changes
       made on branches and will not show removals or mode changes. To
       see all such changes, use the --removed switch.

    .. container:: verbose

      Some examples:

      - changesets with full descriptions and file lists::

          hg log -v

      - changesets ancestral to the working directory::

          hg log -f

      - last 10 commits on the current branch::

          hg log -l 10 -b .

      - changesets showing all modifications of a file, including removals::

          hg log --removed file.c

      - all changesets that touch a directory, with diffs, excluding merges::

          hg log -Mp lib/

      - all revision numbers that match a keyword::

          hg log -k bug --template "{rev}\\n"

      - the full hash identifier of the working directory parent::

          hg log -r . --template "{node}\\n"

      - list available log templates::

          hg log -T list

      - check if a given changeset is included in a tagged release::

          hg log -r "a21ccf and ancestor(1.9)"

      - find all changesets by some user in a date range::

          hg log -k alice -d "may 2008 to jul 2008"

      - summary of all changesets after the last tag::

          hg log -r "last(tagged())::" --template "{desc|firstline}\\n"

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revisions` and :hg:`help revsets` for more about
    specifying and ordering revisions.

    See :hg:`help templates` for more about pre-packaged styles and
    specifying custom templates.

    Returns 0 on success.

    """
    if opts.get('follow') and opts.get('rev'):
        opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))]
        del opts['follow']

    if opts.get('graph'):
        return cmdutil.graphlog(ui, repo, *pats, **opts)

    revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts)
    limit = cmdutil.loglimit(opts)
    count = 0

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)

    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
    for rev in revs:
        if count == limit:
            break
        ctx = repo[rev]
        copies = None
        if getrenamed is not None and rev:
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, rev)
                if rename:
                    copies.append((fn, rename[0]))
        if filematcher:
            revmatchfn = filematcher(ctx.rev())
        else:
            revmatchfn = None
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        if displayer.flush(ctx):
            count += 1

    displayer.close()

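# --- illustrative sketch (not part of upstream commands.py) ---
# log() above folds --follow combined with --rev into a single revset via
# revset.formatspec('reverse(::%lr)', ...). The hypothetical helper below only
# rebuilds a roughly equivalent expression string with plain Python, to make
# the rewriting step easier to follow.
def _example_follow_revspec(userrevs):
    # each requested revision contributes its ancestors; the union is
    # reversed so the newest matching changesets are listed first
    joined = '+'.join('(%s)' % r for r in userrevs)
    return 'reverse(::(%s))' % joined

# _example_follow_revspec(['1.9', 'stable']) == 'reverse(::((1.9)+(stable)))'
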
@command('manifest',
    [('r', 'rev', '', _('revision to display'), _('REV')),
     ('', 'all', False, _("list files from all revisions"))]
    + formatteropts,
    _('[-r REV]'))
def manifest(ui, repo, node=None, rev=None, **opts):
    """output the current or given revision of the project manifest

    Print a list of version controlled files for the given revision.
    If no revision is given, the first parent of the working directory
    is used, or the null revision if no revision is checked out.

    With -v, print file permissions, symlink and executable bits.
    With --debug, print file revision hashes.

    If option --all is specified, the list of all files from all revisions
    is printed. This includes deleted and renamed files.

    Returns 0 on success.
    """

    fm = ui.formatter('manifest', opts)

    if opts.get('all'):
        if rev or node:
            raise error.Abort(_("can't specify a revision with --all"))

        res = []
        prefix = "data/"
        suffix = ".i"
        plen = len(prefix)
        slen = len(suffix)
        with repo.lock():
            for fn, b, size in repo.store.datafiles():
                if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix:
                    res.append(fn[plen:-slen])
        for f in res:
            fm.startitem()
            fm.write("path", '%s\n', f)
        fm.end()
        return

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not node:
        node = rev

    char = {'l': '@', 'x': '*', '': ''}
    mode = {'l': '644', 'x': '755', '': '644'}
    ctx = scmutil.revsingle(repo, node)
    mf = ctx.manifest()
    for f in ctx:
        fm.startitem()
        fl = ctx[f].flags()
        fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f]))
        fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl])
        fm.write('path', '%s\n', f)
    fm.end()

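# --- illustrative sketch (not part of upstream commands.py) ---
# The --all branch of manifest() above recovers tracked paths from store
# filelog names such as 'data/foo.c.i'. The hypothetical helper below shows
# the same prefix/suffix stripping in isolation.
def _example_storename_to_path(storename, prefix='data/', suffix='.i'):
    if storename.startswith(prefix) and storename.endswith(suffix):
        return storename[len(prefix):-len(suffix)]
    return None  # not a filelog entry

# _example_storename_to_path('data/foo.c.i') == 'foo.c'
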
@command('^merge',
    [('f', 'force', None,
      _('force a merge including outstanding changes (DEPRECATED)')),
    ('r', 'rev', '', _('revision to merge'), _('REV')),
    ('P', 'preview', None,
     _('review revisions to merge (no merge is performed)'))
    ] + mergetoolopts,
    _('[-P] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
    """merge another revision into working directory

    The current working directory is updated with all changes made in
    the requested revision since the last common predecessor revision.

    Files that changed between either parent are marked as changed for
    the next commit and a commit must be performed before any further
    updates to the repository are allowed. The next commit will have
    two parents.

    ``--tool`` can be used to specify the merge tool used for file
    merges. It overrides the HGMERGE environment variable and your
    configuration files. See :hg:`help merge-tools` for options.

    If no revision is specified, the working directory's parent is a
    head revision, and the current branch contains exactly one other
    head, the other head is merged with by default. Otherwise, an
    explicit revision with which to merge must be provided.

    See :hg:`help resolve` for information on handling file conflicts.

    To undo an uncommitted merge, use :hg:`update --clean .` which
    will check out a clean copy of the original merge parent, losing
    all changes.

    Returns 0 on success, 1 if there are unresolved files.
    """

    if opts.get('rev') and node:
        raise error.Abort(_("please specify just one revision"))
    if not node:
        node = opts.get('rev')

    if node:
        node = scmutil.revsingle(repo, node).node()

    if not node:
        node = repo[destutil.destmerge(repo)].node()

    if opts.get('preview'):
        # find nodes that are ancestors of p2 but not of p1
        p1 = repo.lookup('.')
        p2 = repo.lookup(node)
        nodes = repo.changelog.findmissing(common=[p1], heads=[p2])

        displayer = cmdutil.show_changeset(ui, repo, opts)
        for node in nodes:
            displayer.show(repo[node])
        displayer.close()
        return 0

    try:
        # ui.forcemerge is an internal variable, do not document
        repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge')
        force = opts.get('force')
        labels = ['working copy', 'merge rev']
        return hg.merge(repo, node, force=force, mergeforce=force,
                        labels=labels)
    finally:
        ui.setconfig('ui', 'forcemerge', '', 'merge')

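# --- illustrative sketch (not part of upstream commands.py) ---
# merge() above temporarily sets the internal ui.forcemerge option and always
# clears it again, even when hg.merge() raises. The hypothetical helper below
# isolates that try/finally scoping pattern; 'mergefunc' stands in for the
# actual hg.merge() call.
def _example_with_forcemerge(repo, tool, mergefunc):
    repo.ui.setconfig('ui', 'forcemerge', tool or '', 'merge')
    try:
        return mergefunc()
    finally:
        repo.ui.setconfig('ui', 'forcemerge', '', 'merge')
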
@command('outgoing|out',
    [('f', 'force', None, _('run even when the destination is unrelated')),
    ('r', 'rev', [],
     _('a changeset intended to be included in the destination'), _('REV')),
    ('n', 'newest-first', None, _('show newest record first')),
    ('B', 'bookmarks', False, _('compare bookmarks')),
    ('b', 'branch', [], _('a specific branch you would like to push'),
     _('BRANCH')),
    ] + logopts + remoteopts + subrepoopts,
    _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]'))
def outgoing(ui, repo, dest=None, **opts):
    """show changesets not found in the destination

    Show changesets not found in the specified destination repository
    or the default push location. These are the changesets that would
    be pushed if a push was requested.

    See pull for details of valid destination formats.

    .. container:: verbose

      With -B/--bookmarks, the result of bookmark comparison between
      local and remote repositories is displayed. With -v/--verbose,
      status is also displayed for each bookmark like below::

        BM1               01234567890a added
        BM2                            deleted
        BM3               234567890abc advanced
        BM4               34567890abcd diverged
        BM5               4567890abcde changed

      The action taken when pushing depends on the
      status of each bookmark:

      :``added``: push with ``-B`` will create it
      :``deleted``: push with ``-B`` will delete it
      :``advanced``: push will update it
      :``diverged``: push with ``-B`` will update it
      :``changed``: push with ``-B`` will update it

      From the point of view of pushing behavior, bookmarks
      existing only in the remote repository are treated as
      ``deleted``, even if they were in fact added remotely.

    Returns 0 if there are outgoing changes, 1 otherwise.
    """
    if opts.get('graph'):
        cmdutil.checkunsupportedgraphflags([], opts)
        o, other = hg._outgoing(ui, repo, dest, opts)
        if not o:
            cmdutil.outgoinghooks(ui, repo, other, opts, o)
            return

        revdag = cmdutil.graphrevs(repo, o, opts)
        displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
        cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges)
        cmdutil.outgoinghooks(ui, repo, other, opts, o)
        return 0

    if opts.get('bookmarks'):
        dest = ui.expandpath(dest or 'default-push', dest or 'default')
        dest, branches = hg.parseurl(dest, opts.get('branch'))
        other = hg.peer(repo, opts, dest)
        if 'bookmarks' not in other.listkeys('namespaces'):
            ui.warn(_("remote doesn't support bookmarks\n"))
            return 0
        ui.status(_('comparing with %s\n') % util.hidepassword(dest))
        return bookmarks.outgoing(ui, repo, other)

    repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default')
    try:
        return hg.outgoing(ui, repo, dest, opts)
    finally:
        del repo._subtoppath

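# --- illustrative sketch (not part of upstream commands.py) ---
# outgoing() above expands the destination with
# ui.expandpath(dest or 'default-push', dest or 'default'). The hypothetical
# helper below shows the same fallback order over a plain dict standing in
# for the [paths] configuration section.
def _example_pick_push_dest(dest, paths):
    if dest:
        return dest
    return paths.get('default-push') or paths.get('default')

# _example_pick_push_dest(None, {'default': 'https://example.com/repo'})
# == 'https://example.com/repo'
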
@command('parents',
    [('r', 'rev', '', _('show parents of the specified revision'), _('REV')),
    ] + templateopts,
    _('[-r REV] [FILE]'),
    inferrepo=True)
def parents(ui, repo, file_=None, **opts):
    """show the parents of the working directory or revision (DEPRECATED)

    Print the working directory's parent revisions. If a revision is
    given via -r/--rev, the parent of that revision will be printed.
    If a file argument is given, the revision in which the file was
    last changed (before the working directory revision or the
    argument to --rev if given) is printed.

    This command is equivalent to::

        hg log -r "p1()+p2()" or
        hg log -r "p1(REV)+p2(REV)" or
        hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or
        hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))"

    See :hg:`summary` and :hg:`help revsets` for related information.

    Returns 0 on success.
    """

    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    if file_:
        m = scmutil.match(ctx, (file_,), opts)
        if m.anypats() or len(m.files()) != 1:
            raise error.Abort(_('can only specify an explicit filename'))
        file_ = m.files()[0]
        filenodes = []
        for cp in ctx.parents():
            if not cp:
                continue
            try:
                filenodes.append(cp.filenode(file_))
            except error.LookupError:
                pass
        if not filenodes:
            raise error.Abort(_("'%s' not found in manifest!") % file_)
        p = []
        for fn in filenodes:
            fctx = repo.filectx(file_, fileid=fn)
            p.append(fctx.node())
    else:
        p = [cp.node() for cp in ctx.parents()]

    displayer = cmdutil.show_changeset(ui, repo, opts)
    for n in p:
        if n != nullid:
            displayer.show(repo[n])
    displayer.close()

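# --- illustrative sketch (not part of upstream commands.py) ---
# The parents() docstring above lists the equivalent revsets. The hypothetical
# helper below builds those expressions for an optional revision and file
# argument, mirroring the documented forms.
def _example_parents_revset(rev=None, path=None):
    r = rev or ''
    if path is None:
        return 'p1(%s)+p2(%s)' % (r, r)
    return ('max(::p1(%s) and file(%s))+max(::p2(%s) and file(%s))'
            % (r, path, r, path))

# _example_parents_revset() == 'p1()+p2()'
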
@command('paths', formatteropts, _('[NAME]'), optionalrepo=True)
def paths(ui, repo, search=None, **opts):
    """show aliases for remote repositories

    Show definition of symbolic path name NAME. If no name is given,
    show definition of all available names.

    Option -q/--quiet suppresses all output when searching for NAME
    and shows only the path names when listing all definitions.

    Path names are defined in the [paths] section of your
    configuration file and in ``/etc/mercurial/hgrc``. If run inside a
    repository, ``.hg/hgrc`` is used, too.

    The path names ``default`` and ``default-push`` have a special
    meaning. When performing a push or pull operation, they are used
    as fallbacks if no location is specified on the command-line.
    When ``default-push`` is set, it will be used for push and
    ``default`` will be used for pull; otherwise ``default`` is used
    as the fallback for both. When cloning a repository, the clone
    source is written as ``default`` in ``.hg/hgrc``.

    .. note::

       ``default`` and ``default-push`` apply to all inbound (e.g.
       :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email`
       and :hg:`bundle`) operations.

    See :hg:`help urls` for more information.

    Returns 0 on success.
    """
    if search:
        pathitems = [(name, path) for name, path in ui.paths.iteritems()
                     if name == search]
    else:
        pathitems = sorted(ui.paths.iteritems())

    fm = ui.formatter('paths', opts)
    if fm.isplain():
        hidepassword = util.hidepassword
    else:
        hidepassword = str
    if ui.quiet:
        namefmt = '%s\n'
    else:
        namefmt = '%s = '
    showsubopts = not search and not ui.quiet

    for name, path in pathitems:
        fm.startitem()
        fm.condwrite(not search, 'name', namefmt, name)
        fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc))
        for subopt, value in sorted(path.suboptions.items()):
            assert subopt not in ('name', 'url')
            if showsubopts:
                fm.plain('%s:%s = ' % (name, subopt))
            fm.condwrite(showsubopts, subopt, '%s\n', value)

    fm.end()

    if search and not pathitems:
        if not ui.quiet:
            ui.warn(_("not found!\n"))
        return 1
    else:
        return 0

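# --- illustrative sketch (not part of upstream commands.py) ---
# paths() above prints only names in quiet mode, and 'name = url' plus
# 'name:subopt = value' lines otherwise. The hypothetical helper below
# reproduces that plain-output layout for a single path entry.
def _example_format_path(name, url, suboptions, quiet=False):
    if quiet:
        return [name]
    lines = ['%s = %s' % (name, url)]
    for subopt, value in sorted(suboptions.items()):
        lines.append('%s:%s = %s' % (name, subopt, value))
    return lines
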
@command('phase',
    [('p', 'public', False, _('set changeset phase to public')),
     ('d', 'draft', False, _('set changeset phase to draft')),
     ('s', 'secret', False, _('set changeset phase to secret')),
     ('f', 'force', False, _('allow to move boundary backward')),
     ('r', 'rev', [], _('target revision'), _('REV')),
    ],
    _('[-p|-d|-s] [-f] [-r] [REV...]'))
def phase(ui, repo, *revs, **opts):
    """set or show the current phase name

    With no argument, show the phase name of the current revision(s).

    With one of -p/--public, -d/--draft or -s/--secret, change the
    phase value of the specified revisions.

    Unless -f/--force is specified, :hg:`phase` won't move changesets from a
    lower phase to a higher phase. Phases are ordered as follows::

        public < draft < secret

    Returns 0 on success, 1 if some phases could not be changed.

    (For more information about the phases concept, see :hg:`help phases`.)
    """
    # search for a unique phase argument
    targetphase = None
    for idx, name in enumerate(phases.phasenames):
        if opts[name]:
            if targetphase is not None:
                raise error.Abort(_('only one phase can be specified'))
            targetphase = idx

    # look for specified revision
    revs = list(revs)
    revs.extend(opts['rev'])
    if not revs:
        # display both parents as the second parent phase can influence
        # the phase of a merge commit
        revs = [c.rev() for c in repo[None].parents()]

    revs = scmutil.revrange(repo, revs)

    lock = None
    ret = 0
    if targetphase is None:
        # display
        for r in revs:
            ctx = repo[r]
            ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
    else:
        tr = None
        lock = repo.lock()
        try:
            tr = repo.transaction("phase")
            # set phase
            if not revs:
                raise error.Abort(_('empty revision set'))
            nodes = [repo[r].node() for r in revs]
            # moving revision from public to draft may hide them
            # We have to check result on an unfiltered repository
            unfi = repo.unfiltered()
            getphase = unfi._phasecache.phase
            olddata = [getphase(unfi, r) for r in unfi]
            phases.advanceboundary(repo, tr, targetphase, nodes)
            if opts['force']:
                phases.retractboundary(repo, tr, targetphase, nodes)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()
        getphase = unfi._phasecache.phase
        newdata = [getphase(unfi, r) for r in unfi]
        changes = sum(newdata[r] != olddata[r] for r in unfi)
        cl = unfi.changelog
        rejected = [n for n in nodes
                    if newdata[cl.rev(n)] < targetphase]
        if rejected:
            ui.warn(_('cannot move %i changesets to a higher '
                      'phase, use --force\n') % len(rejected))
            ret = 1
        if changes:
            msg = _('phase changed for %i changesets\n') % changes
            if ret:
                ui.status(msg)
            else:
                ui.note(msg)
        else:
            ui.warn(_('no phases changed\n'))
    return ret

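# --- illustrative sketch (not part of upstream commands.py) ---
# After moving the boundary, phase() above compares per-revision phase values
# to count what changed and to detect revisions that were refused a move
# without --force. The hypothetical helper below isolates that bookkeeping:
# 'olddata' and 'newdata' map revision numbers to phase values.
def _example_phase_result(olddata, newdata, targetrevs, targetphase):
    changes = sum(1 for r in newdata if newdata[r] != olddata[r])
    rejected = [r for r in targetrevs if newdata[r] < targetphase]
    return changes, rejected
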
def postincoming(ui, repo, modheads, optupdate, checkout, brev):
    """Run after a changegroup has been added via pull/unbundle

    This takes arguments below:

    :modheads: change of heads by pull/unbundle
    :optupdate: updating working directory is needed or not
    :checkout: update destination revision (or None to default destination)
    :brev: a name, which might be a bookmark to be activated after updating
    """
    if modheads == 0:
        return
    if optupdate:
        try:
            return hg.updatetotally(ui, repo, checkout, brev)
        except error.UpdateAbort as inst:
            msg = _("not updating: %s") % str(inst)
            hint = inst.hint
            raise error.UpdateAbort(msg, hint=hint)
    if modheads > 1:
        currentbranchheads = len(repo.branchheads())
        if currentbranchheads == modheads:
            ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
        elif currentbranchheads > 1:
            ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
                        "merge)\n"))
        else:
            ui.status(_("(run 'hg heads' to see heads)\n"))
    else:
        ui.status(_("(run 'hg update' to get a working copy)\n"))

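# --- illustrative sketch (not part of upstream commands.py) ---
# The hint printed by postincoming() above depends only on how many heads the
# incoming changegroup added and how many heads the current branch now has.
# The hypothetical helper below reduces that choice to a pure function.
def _example_postpull_hint(modheads, currentbranchheads):
    if modheads == 0:
        return None
    if modheads == 1:
        return "(run 'hg update' to get a working copy)"
    if currentbranchheads == modheads:
        return "(run 'hg heads' to see heads, 'hg merge' to merge)"
    if currentbranchheads > 1:
        return "(run 'hg heads .' to see heads, 'hg merge' to merge)"
    return "(run 'hg heads' to see heads)"
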
@command('^pull',
    [('u', 'update', None,
     _('update to new branch head if changesets were pulled')),
    ('f', 'force', None, _('run even when remote repository is unrelated')),
    ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')),
    ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')),
    ('b', 'branch', [], _('a specific branch you would like to pull'),
     _('BRANCH')),
    ] + remoteopts,
    _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]'))
def pull(ui, repo, source="default", **opts):
    """pull changes from the specified source

    Pull changes from a remote repository to a local one.

    This finds all changes from the repository at the specified path
    or URL and adds them to a local repository (the current one unless
    -R is specified). By default, this does not update the copy of the
    project in the working directory.

    Use :hg:`incoming` if you want to see what would have been added
    by a pull at the time you issued this command. If you then decide
    to add those changes to the repository, you should use :hg:`pull
    -r X` where ``X`` is the last changeset listed by :hg:`incoming`.

    If SOURCE is omitted, the 'default' path will be used.
    See :hg:`help urls` for more information.

    Specifying bookmark as ``.`` is equivalent to specifying the active
    bookmark's name.

    Returns 0 on success, 1 if an update had unresolved files.
    """
    source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
    ui.status(_('pulling from %s\n') % util.hidepassword(source))
    other = hg.peer(repo, opts, source)
    try:
        revs, checkout = hg.addbranchrevs(repo, other, branches,
                                          opts.get('rev'))


        pullopargs = {}
        if opts.get('bookmark'):
            if not revs:
                revs = []
            # The list of bookmarks used here is not the one used to actually
            # update the bookmark name. This can result in the revision pulled
            # not ending up with the name of the bookmark because of a race
            # condition on the server. (See issue 4689 for details)
            remotebookmarks = other.listkeys('bookmarks')
            pullopargs['remotebookmarks'] = remotebookmarks
            for b in opts['bookmark']:
                b = repo._bookmarks.expandname(b)
                if b not in remotebookmarks:
                    raise error.Abort(_('remote bookmark %s not found!') % b)
                revs.append(remotebookmarks[b])

        if revs:
            try:
                # When 'rev' is a bookmark name, we cannot guarantee that it
                # will be updated with that name because of a race condition
                # server side. (See issue 4689 for details)
                oldrevs = revs
                revs = [] # actually, nodes
                for r in oldrevs:
                    node = other.lookup(r)
                    revs.append(node)
                    if r == checkout:
                        checkout = node
            except error.CapabilityError:
                err = _("other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise error.Abort(err)

        pullopargs.update(opts.get('opargs', {}))
        modheads = exchange.pull(repo, other, heads=revs,
                                 force=opts.get('force'),
                                 bookmarks=opts.get('bookmark', ()),
                                 opargs=pullopargs).cgresult

        # brev is a name, which might be a bookmark to be activated at
        # the end of the update. In other words, it is an explicit
        # destination of the update
        brev = None

        if checkout:
            checkout = str(repo.changelog.rev(checkout))

        # order below depends on implementation of
        # hg.addbranchrevs(). opts['bookmark'] is ignored,
        # because 'checkout' is determined without it.
        if opts.get('rev'):
            brev = opts['rev'][0]
        elif opts.get('branch'):
            brev = opts['branch'][0]
        else:
            brev = branches[0]
        repo._subtoppath = source
        try:
            ret = postincoming(ui, repo, modheads, opts.get('update'),
                               checkout, brev)

        finally:
            del repo._subtoppath

    finally:
        other.close()
    return ret

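# --- illustrative sketch (not part of upstream commands.py) ---
# pull() above resolves each -B/--bookmark name against the remote
# 'bookmarks' listkeys namespace and pulls the corresponding nodes. The
# hypothetical helper below shows that resolution over a plain dict of
# name -> node; upstream raises error.Abort instead of LookupError.
def _example_bookmarks_to_revs(requested, remotebookmarks):
    revs = []
    for b in requested:
        if b not in remotebookmarks:
            raise LookupError('remote bookmark %s not found!' % b)
        revs.append(remotebookmarks[b])
    return revs
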
5178 @command('^push',
5178 @command('^push',
5179 [('f', 'force', None, _('force push')),
5179 [('f', 'force', None, _('force push')),
5180 ('r', 'rev', [],
5180 ('r', 'rev', [],
5181 _('a changeset intended to be included in the destination'),
5181 _('a changeset intended to be included in the destination'),
5182 _('REV')),
5182 _('REV')),
5183 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5183 ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')),
5184 ('b', 'branch', [],
5184 ('b', 'branch', [],
5185 _('a specific branch you would like to push'), _('BRANCH')),
5185 _('a specific branch you would like to push'), _('BRANCH')),
5186 ('', 'new-branch', False, _('allow pushing a new branch')),
5186 ('', 'new-branch', False, _('allow pushing a new branch')),
5187 ] + remoteopts,
5187 ] + remoteopts,
5188 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5188 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]'))
5189 def push(ui, repo, dest=None, **opts):
5189 def push(ui, repo, dest=None, **opts):
5190 """push changes to the specified destination
5190 """push changes to the specified destination
5191
5191
5192 Push changesets from the local repository to the specified
5192 Push changesets from the local repository to the specified
5193 destination.
5193 destination.
5194
5194
5195 This operation is symmetrical to pull: it is identical to a pull
5195 This operation is symmetrical to pull: it is identical to a pull
5196 in the destination repository from the current one.
5196 in the destination repository from the current one.
5197
5197
5198 By default, push will not allow creation of new heads at the
5198 By default, push will not allow creation of new heads at the
5199 destination, since multiple heads would make it unclear which head
5199 destination, since multiple heads would make it unclear which head
5200 to use. In this situation, it is recommended to pull and merge
5200 to use. In this situation, it is recommended to pull and merge
5201 before pushing.
5201 before pushing.
5202
5202
5203 Use --new-branch if you want to allow push to create a new named
5203 Use --new-branch if you want to allow push to create a new named
5204 branch that is not present at the destination. This allows you to
5204 branch that is not present at the destination. This allows you to
5205 only create a new branch without forcing other changes.
5205 only create a new branch without forcing other changes.
5206
5206
5207 .. note::
5207 .. note::
5208
5208
5209 Extra care should be taken with the -f/--force option,
5209 Extra care should be taken with the -f/--force option,
5210 which will push all new heads on all branches, an action which will
5210 which will push all new heads on all branches, an action which will
5211 almost always cause confusion for collaborators.
5211 almost always cause confusion for collaborators.
5212
5212
5213 If -r/--rev is used, the specified revision and all its ancestors
5213 If -r/--rev is used, the specified revision and all its ancestors
5214 will be pushed to the remote repository.
5214 will be pushed to the remote repository.
5215
5215
5216 If -B/--bookmark is used, the specified bookmarked revision, its
5216 If -B/--bookmark is used, the specified bookmarked revision, its
5217 ancestors, and the bookmark will be pushed to the remote
5217 ancestors, and the bookmark will be pushed to the remote
5218 repository. Specifying ``.`` is equivalent to specifying the active
5218 repository. Specifying ``.`` is equivalent to specifying the active
5219 bookmark's name.
5219 bookmark's name.
5220
5220
5221 Please see :hg:`help urls` for important details about ``ssh://``
5221 Please see :hg:`help urls` for important details about ``ssh://``
5222 URLs. If DESTINATION is omitted, a default path will be used.
5222 URLs. If DESTINATION is omitted, a default path will be used.
5223
5223
5224 Returns 0 if push was successful, 1 if nothing to push.
5224 Returns 0 if push was successful, 1 if nothing to push.
5225 """
5225 """
5226
5226
5227 if opts.get('bookmark'):
5227 if opts.get('bookmark'):
5228 ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push')
        for b in opts['bookmark']:
            # translate -B options to -r so changesets get pushed
            b = repo._bookmarks.expandname(b)
            if b in repo._bookmarks:
                opts.setdefault('rev', []).append(b)
            else:
                # if we try to push a deleted bookmark, translate it to null
                # this lets simultaneous -r, -b options continue working
                opts.setdefault('rev', []).append("null")

    path = ui.paths.getpath(dest, default=('default-push', 'default'))
    if not path:
        raise error.Abort(_('default repository not configured!'),
                          hint=_("see 'hg help config.paths'"))
    dest = path.pushloc or path.loc
    branches = (path.branch, opts.get('branch') or [])
    ui.status(_('pushing to %s\n') % util.hidepassword(dest))
    revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
    other = hg.peer(repo, opts, dest)

    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
        if not revs:
            raise error.Abort(_("specified revisions evaluate to an empty set"),
                              hint=_("use different revision arguments"))
    elif path.pushrev:
        # It doesn't make any sense to specify ancestor revisions. So limit
        # to DAG heads to make discovery simpler.
        expr = revset.formatspec('heads(%r)', path.pushrev)
        revs = scmutil.revrange(repo, [expr])
        revs = [repo[rev].node() for rev in revs]
        if not revs:
            raise error.Abort(_('default push revset for path evaluates to an '
                                'empty set'))

    repo._subtoppath = dest
    try:
        # push subrepos depth-first for coherent ordering
        c = repo['']
        subs = c.substate # only repos that are committed
        for s in sorted(subs):
            result = c.sub(s).push(opts)
            if result == 0:
                return not result
    finally:
        del repo._subtoppath
    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           newbranch=opts.get('new_branch'),
                           bookmarks=opts.get('bookmark', ()),
                           opargs=opts.get('opargs'))

    result = not pushop.cgresult

    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    return result

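# Illustrative usage sketch (not from the original source; the bookmark names
# are hypothetical). The -B handling above turns each bookmark into a -r
# argument, so pushing a bookmark also pushes the changesets it points to:
#
#   hg push -B feature        # roughly: hg push -r feature, plus updating the
#                             # "feature" bookmark on the remote
#   hg push -B deleted-mark   # a locally deleted bookmark becomes -r null, so
#                             # it can still be combined with other -r/-b options
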
@command('recover', [])
def recover(ui, repo):
    """roll back an interrupted transaction

    Recover from an interrupted commit or pull.

    This command tries to fix the repository status after an
    interrupted operation. It should only be necessary when Mercurial
    suggests it.

    Returns 0 if successful, 1 if nothing to recover or verify fails.
    """
    if repo.recover():
        return hg.verify(repo)
    return 1

@command('^remove|rm',
    [('A', 'after', None, _('record delete for missing files')),
    ('f', 'force', None,
     _('forget added files, delete modified files')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... FILE...'),
    inferrepo=True)
def remove(ui, repo, *pats, **opts):
    """remove the specified files on the next commit

    Schedule the indicated files for removal from the current branch.

    This command schedules the files to be removed at the next commit.
    To undo a remove before that, see :hg:`revert`. To undo added
    files, see :hg:`forget`.

    .. container:: verbose

      -A/--after can be used to remove only files that have already
      been deleted, -f/--force can be used to force deletion, and -Af
      can be used to remove files from the next revision without
      deleting them from the working directory.

      The following table details the behavior of remove for different
      file states (columns) and option combinations (rows). The file
      states are Added [A], Clean [C], Modified [M] and Missing [!]
      (as reported by :hg:`status`). The actions are Warn, Remove
      (from branch) and Delete (from disk):

      ========= == == == ==
      opt/state A  C  M  !
      ========= == == == ==
      none      W  RD W  R
      -f        R  RD RD R
      -A        W  W  W  R
      -Af       R  R  R  R
      ========= == == == ==

      .. note::

         :hg:`remove` never deletes files in Added [A] state from the
         working directory, not even if ``--force`` is specified.

    Returns 0 on success, 1 if any warnings encountered.
    """

    after, force = opts.get('after'), opts.get('force')
    if not pats and not after:
        raise error.Abort(_('no files specified'))

    m = scmutil.match(repo[None], pats, opts)
    subrepos = opts.get('subrepos')
    return cmdutil.remove(ui, repo, m, "", after, force, subrepos)

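# Illustrative usage sketch (not from the original source; the file names are
# hypothetical), matching the option table in the docstring above:
#
#   hg remove clean.txt        # clean file: removed from the branch and
#                              # deleted from disk (RD in the table)
#   hg remove -A gone.txt      # only record deletes for files already missing
#   hg remove -Af tracked.txt  # drop from the next commit but keep the copy
#                              # in the working directory
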
@command('rename|move|mv',
    [('A', 'after', None, _('record a rename that has already occurred')),
    ('f', 'force', None, _('forcibly copy over an existing managed file')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... SOURCE... DEST'))
def rename(ui, repo, *pats, **opts):
    """rename files; equivalent of copy + remove

    Mark dest as copies of sources; mark sources for deletion. If dest
    is a directory, copies are put in that directory. If dest is a
    file, there can only be one source.

    By default, this command copies the contents of files as they
    exist in the working directory. If invoked with -A/--after, the
    operation is recorded, but no copying is performed.

    This command takes effect at the next commit. To undo a rename
    before that, see :hg:`revert`.

    Returns 0 on success, 1 if errors are encountered.
    """
    with repo.wlock(False):
        return cmdutil.copy(ui, repo, pats, opts, rename=True)

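# Illustrative usage sketch (not from the original source; the paths are
# hypothetical):
#
#   hg rename src/old.py src/new.py      # copy + schedule removal of the source
#   mv docs/a.txt docs/b.txt
#   hg rename -A docs/a.txt docs/b.txt   # record a rename that already happened
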
@command('resolve',
    [('a', 'all', None, _('select all unresolved files')),
    ('l', 'list', None, _('list state of files needing merge')),
    ('m', 'mark', None, _('mark files as resolved')),
    ('u', 'unmark', None, _('mark files as unresolved')),
    ('n', 'no-status', None, _('hide status prefix'))]
    + mergetoolopts + walkopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def resolve(ui, repo, *pats, **opts):
    """redo merges or set/view the merge status of files

    Merges with unresolved conflicts are often the result of
    non-interactive merging using the ``internal:merge`` configuration
    setting, or a command-line merge tool like ``diff3``. The resolve
    command is used to manage the files involved in a merge, after
    :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
    working directory must have two parents). See :hg:`help
    merge-tools` for information on configuring merge tools.

    The resolve command can be used in the following ways:

    - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified
      files, discarding any previous merge attempts. Re-merging is not
      performed for files already marked as resolved. Use ``--all/-a``
      to select all unresolved files. ``--tool`` can be used to specify
      the merge tool used for the given files. It overrides the HGMERGE
      environment variable and your configuration files. Previous file
      contents are saved with a ``.orig`` suffix.

    - :hg:`resolve -m [FILE]`: mark a file as having been resolved
      (e.g. after having manually fixed-up the files). The default is
      to mark all unresolved files.

    - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The
      default is to mark all resolved files.

    - :hg:`resolve -l`: list files which had or still have conflicts.
      In the printed list, ``U`` = unresolved and ``R`` = resolved.

    .. note::

       Mercurial will not let you commit files with unresolved merge
       conflicts. You must use :hg:`resolve -m ...` before you can
       commit after a conflicting merge.

    Returns 0 on success, 1 if any files fail a resolve attempt.
    """

    flaglist = 'all mark unmark list no_status'.split()
    all, mark, unmark, show, nostatus = \
        [opts.get(o) for o in flaglist]

    if (show and (mark or unmark)) or (mark and unmark):
        raise error.Abort(_("too many options specified"))
    if pats and all:
        raise error.Abort(_("can't specify --all and patterns"))
    if not (all or pats or show or mark or unmark):
        raise error.Abort(_('no files or directories specified'),
                          hint=('use --all to re-merge all unresolved files'))

    if show:
        fm = ui.formatter('resolve', opts)
        ms = mergemod.mergestate.read(repo)
        m = scmutil.match(repo[None], pats, opts)
        for f in ms:
            if not m(f):
                continue
            l = 'resolve.' + {'u': 'unresolved', 'r': 'resolved',
                              'd': 'driverresolved'}[ms[f]]
            fm.startitem()
            fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l)
            fm.write('path', '%s\n', f, label=l)
        fm.end()
        return 0

    with repo.wlock():
        ms = mergemod.mergestate.read(repo)

        if not (ms.active() or repo.dirstate.p2() != nullid):
            raise error.Abort(
                _('resolve command not applicable when not merging'))

        wctx = repo[None]

        if ms.mergedriver and ms.mdstate() == 'u':
            proceed = mergemod.driverpreprocess(repo, ms, wctx)
            ms.commit()
            # allow mark and unmark to go through
            if not mark and not unmark and not proceed:
                return 1

        m = scmutil.match(wctx, pats, opts)
        ret = 0
        didwork = False
        runconclude = False

        tocomplete = []
        for f in ms:
            if not m(f):
                continue

            didwork = True

            # don't let driver-resolved files be marked, and run the conclude
            # step if asked to resolve
            if ms[f] == "d":
                exact = m.exact(f)
                if mark:
                    if exact:
                        ui.warn(_('not marking %s as it is driver-resolved\n')
                                % f)
                elif unmark:
                    if exact:
                        ui.warn(_('not unmarking %s as it is driver-resolved\n')
                                % f)
                else:
                    runconclude = True
                continue

            if mark:
                ms.mark(f, "r")
            elif unmark:
                ms.mark(f, "u")
            else:
                # backup pre-resolve (merge uses .orig for its own purposes)
                a = repo.wjoin(f)
                try:
                    util.copyfile(a, a + ".resolve")
                except (IOError, OSError) as inst:
                    if inst.errno != errno.ENOENT:
                        raise

                try:
                    # preresolve file
                    ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                 'resolve')
                    complete, r = ms.preresolve(f, wctx)
                    if not complete:
                        tocomplete.append(f)
                    elif r:
                        ret = 1
                finally:
                    ui.setconfig('ui', 'forcemerge', '', 'resolve')
                    ms.commit()

                # replace filemerge's .orig file with our resolve file, but only
                # for merges that are complete
                if complete:
                    try:
                        util.rename(a + ".resolve",
                                    scmutil.origpath(ui, repo, a))
                    except OSError as inst:
                        if inst.errno != errno.ENOENT:
                            raise

        for f in tocomplete:
            try:
                # resolve file
                ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                             'resolve')
                r = ms.resolve(f, wctx)
                if r:
                    ret = 1
            finally:
                ui.setconfig('ui', 'forcemerge', '', 'resolve')
                ms.commit()

            # replace filemerge's .orig file with our resolve file
            a = repo.wjoin(f)
            try:
                util.rename(a + ".resolve", scmutil.origpath(ui, repo, a))
            except OSError as inst:
                if inst.errno != errno.ENOENT:
                    raise

        ms.commit()
        ms.recordactions()

        if not didwork and pats:
            hint = None
            if not any([p for p in pats if p.find(':') >= 0]):
                pats = ['path:%s' % p for p in pats]
                m = scmutil.match(wctx, pats, opts)
                for f in ms:
                    if not m(f):
                        continue
                    flags = ''.join(['-%s ' % o[0] for o in flaglist
                                     if opts.get(o)])
                    hint = _("(try: hg resolve %s%s)\n") % (
                             flags,
                             ' '.join(pats))
                    break
            ui.warn(_("arguments do not match paths that need resolving\n"))
            if hint:
                ui.warn(hint)
        elif ms.mergedriver and ms.mdstate() != 's':
            # run conclude step when either a driver-resolved file is requested
            # or there are no driver-resolved files
            # we can't use 'ret' to determine whether any files are unresolved
            # because we might not have tried to resolve some
            if ((runconclude or not list(ms.driverresolved()))
                and not list(ms.unresolved())):
                proceed = mergemod.driverconclude(repo, ms, wctx)
                ms.commit()
                if not proceed:
                    return 1

    # Nudge users into finishing an unfinished operation
    unresolvedf = list(ms.unresolved())
    driverresolvedf = list(ms.driverresolved())
    if not unresolvedf and not driverresolvedf:
        ui.status(_('(no more unresolved files)\n'))
        cmdutil.checkafterresolved(repo)
    elif not unresolvedf:
        ui.status(_('(no more unresolved files -- '
                    'run "hg resolve --all" to conclude)\n'))

    return ret

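# Illustrative conflict workflow (not from the original source; the file name
# is hypothetical), following the modes described in the docstring above:
#
#   hg resolve -l                              # U = unresolved, R = resolved
#   hg resolve --tool internal:merge hello.c   # re-run the merge for one file
#   ...edit hello.c by hand...
#   hg resolve -m hello.c                      # mark it resolved so commit works
#   hg resolve --all                           # or re-merge all unresolved files
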
@command('revert',
    [('a', 'all', None, _('revert all changes when no arguments given')),
    ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
    ('r', 'rev', '', _('revert to the specified revision'), _('REV')),
    ('C', 'no-backup', None, _('do not save backup copies of files')),
    ('i', 'interactive', None,
     _('interactively select the changes (EXPERIMENTAL)')),
    ] + walkopts + dryrunopts,
    _('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
    """restore files to their checkout state

    .. note::

       To check out earlier revisions, you should use :hg:`update REV`.
       To cancel an uncommitted merge (and lose your changes),
       use :hg:`update --clean .`.

    With no revision specified, revert the specified files or directories
    to the contents they had in the parent of the working directory.
    This restores the contents of files to an unmodified
    state and unschedules adds, removes, copies, and renames. If the
    working directory has two parents, you must explicitly specify a
    revision.

    Using the -r/--rev or -d/--date options, revert the given files or
    directories to their states as of a specific revision. Because
    revert does not change the working directory parents, this will
    cause these files to appear modified. This can be helpful to "back
    out" some or all of an earlier change. See :hg:`backout` for a
    related method.

    Modified files are saved with a .orig suffix before reverting.
    To disable these backups, use --no-backup. It is possible to store
    the backup files in a custom directory relative to the root of the
    repository by setting the ``ui.origbackuppath`` configuration
    option.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help backout` for a way to reverse the effect of an
    earlier changeset.

    Returns 0 on success.
    """

    if opts.get("date"):
        if opts.get("rev"):
            raise error.Abort(_("you can't specify a revision and a date"))
        opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])

    parent, p2 = repo.dirstate.parents()
    if not opts.get('rev') and p2 != nullid:
        # revert after merge is a trap for new users (issue2915)
        raise error.Abort(_('uncommitted merge with no revision specified'),
                          hint=_("use 'hg update' or see 'hg help revert'"))

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    if (not (pats or opts.get('include') or opts.get('exclude') or
             opts.get('all') or opts.get('interactive'))):
        msg = _("no files or directories specified")
        if p2 != nullid:
            hint = _("uncommitted merge, use --all to discard all changes,"
                     " or 'hg update -C .' to abort the merge")
            raise error.Abort(msg, hint=hint)
        dirty = any(repo.status())
        node = ctx.node()
        if node != parent:
            if dirty:
                hint = _("uncommitted changes, use --all to discard all"
                         " changes, or 'hg update %s' to update") % ctx.rev()
            else:
                hint = _("use --all to revert all files,"
                         " or 'hg update %s' to update") % ctx.rev()
        elif dirty:
            hint = _("uncommitted changes, use --all to discard all changes")
        else:
            hint = _("use --all to revert all files")
        raise error.Abort(msg, hint=hint)

    return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)

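# Illustrative usage sketch (not from the original source; the file name and
# revision number are hypothetical):
#
#   hg revert foo.c                       # back to the working directory's
#                                         # parent; old contents go to foo.c.orig
#   hg revert -r 42 --no-backup foo.c     # contents as of revision 42, no .orig
#   hg revert --all                       # revert every changed file
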
@command('rollback', dryrunopts +
         [('f', 'force', False, _('ignore safety measures'))])
def rollback(ui, repo, **opts):
    """roll back the last transaction (DANGEROUS) (DEPRECATED)

    Please use :hg:`commit --amend` instead of rollback to correct
    mistakes in the last commit.

    This command should be used with care. There is only one level of
    rollback, and there is no way to undo a rollback. It will also
    restore the dirstate at the time of the last transaction, losing
    any dirstate changes since that time. This command does not alter
    the working directory.

    Transactions are used to encapsulate the effects of all commands
    that create new changesets or propagate existing changesets into a
    repository.

    .. container:: verbose

      For example, the following commands are transactional, and their
      effects can be rolled back:

      - commit
      - import
      - pull
      - push (with this repository as the destination)
      - unbundle

      To avoid permanent data loss, rollback will refuse to rollback a
      commit transaction if it isn't checked out. Use --force to
      override this protection.

      The rollback command can be entirely disabled by setting the
      ``ui.rollback`` configuration setting to false. If you're here
      because you want to use rollback and it's disabled, you can
      re-enable the command by setting ``ui.rollback`` to true.

    This command is not intended for use on public repositories. Once
    changes are visible for pull by other users, rolling a transaction
    back locally is ineffective (someone else may already have pulled
    the changes). Furthermore, a race is possible with readers of the
    repository; for example an in-progress pull from the repository
    may fail if a rollback is performed.

    Returns 0 on success, 1 if no rollback data is available.
    """
    if not ui.configbool('ui', 'rollback', True):
        raise error.Abort(_('rollback is disabled because it is unsafe'),
                          hint=('see `hg help -v rollback` for information'))
    return repo.rollback(dryrun=opts.get('dry_run'),
                         force=opts.get('force'))

@command('root', [])
def root(ui, repo):
    """print the root (top) of the current working directory

    Print the root directory of the current repository.

    Returns 0 on success.
    """
    ui.write(repo.root + "\n")

@command('^serve',
    [('A', 'accesslog', '', _('name of access log file to write to'),
     _('FILE')),
    ('d', 'daemon', None, _('run server in background')),
    ('', 'daemon-postexec', [], _('used internally by daemon mode')),
    ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')),
    # use string type, then we can check if something was passed
    ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')),
    ('a', 'address', '', _('address to listen on (default: all interfaces)'),
     _('ADDR')),
    ('', 'prefix', '', _('prefix path to serve from (default: server root)'),
     _('PREFIX')),
    ('n', 'name', '',
     _('name to show in web pages (default: working directory)'), _('NAME')),
    ('', 'web-conf', '',
     _("name of the hgweb config file (see 'hg help hgweb')"), _('FILE')),
    ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'),
     _('FILE')),
    ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')),
    ('', 'stdio', None, _('for remote clients')),
    ('', 'cmdserver', '', _('for remote clients'), _('MODE')),
    ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')),
    ('', 'style', '', _('template style to use'), _('STYLE')),
    ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
    ('', 'certificate', '', _('SSL certificate file'), _('FILE'))],
    _('[OPTION]...'),
    optionalrepo=True)
def serve(ui, repo, **opts):
    """start stand-alone webserver

    Start a local HTTP repository browser and pull server. You can use
    this for ad-hoc sharing and browsing of repositories. It is
    recommended to use a real web server to serve a repository for
    longer periods of time.

    Please note that the server does not implement access control.
    This means that, by default, anybody can read from the server and
    nobody can write to it by default. Set the ``web.allow_push``
    option to ``*`` to allow everybody to push to the server. You
    should use a real web server if you need to authenticate users.

    By default, the server logs accesses to stdout and errors to
    stderr. Use the -A/--accesslog and -E/--errorlog options to log to
    files.

    To have the server choose a free port number to listen on, specify
    a port number of 0; in this case, the server will print the port
    number it uses.

    Returns 0 on success.
    """

    if opts["stdio"] and opts["cmdserver"]:
        raise error.Abort(_("cannot use --stdio with --cmdserver"))

    if opts["stdio"]:
        if repo is None:
            raise error.RepoError(_("there is no Mercurial repository here"
                                    " (.hg not found)"))
        s = sshserver.sshserver(ui, repo)
        s.serve_forever()

    service = server.createservice(ui, repo, opts)
    return server.runservice(opts, initfn=service.init, runfn=service.run)

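# Illustrative usage sketch (not from the original source; the log and pid file
# names are hypothetical):
#
#   hg serve                   # browse the repository at http://localhost:8000/
#   hg serve -p 0              # let the server pick a free port and print it
#   hg serve -d --pid-file hg.pid -A access.log -E error.log
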
@command('^status|st',
    [('A', 'all', None, _('show status of all files')),
    ('m', 'modified', None, _('show only modified files')),
    ('a', 'added', None, _('show only added files')),
    ('r', 'removed', None, _('show only removed files')),
    ('d', 'deleted', None, _('show only deleted (but tracked) files')),
    ('c', 'clean', None, _('show only files without changes')),
    ('u', 'unknown', None, _('show only unknown (not tracked) files')),
    ('i', 'ignored', None, _('show only ignored files')),
    ('n', 'no-status', None, _('hide status prefix')),
    ('C', 'copies', None, _('show source of copied files')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ('', 'rev', [], _('show difference from revision'), _('REV')),
    ('', 'change', '', _('list the changed files of a revision'), _('REV')),
    ] + walkopts + subrepoopts + formatteropts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def status(ui, repo, *pats, **opts):
    """show changed files in the working directory

    Show status of files in the repository. If names are given, only
    files that match are shown. Files that are clean or ignored or
    the source of a copy/move operation, are not listed unless
    -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
    Unless options described with "show only ..." are given, the
    options -mardu are used.

    Option -q/--quiet hides untracked (unknown and ignored) files
    unless explicitly requested with -u/--unknown or -i/--ignored.

    .. note::

       :hg:`status` may appear to disagree with diff if permissions have
       changed or a merge has occurred. The standard diff format does
       not report permission changes and diff only reports changes
       relative to one merge parent.

    If one revision is given, it is used as the base revision.
    If two revisions are given, the differences between them are
    shown. The --change option can also be used as a shortcut to list
    the changed files of a revision from its first parent.

    The codes used to show the status of files are::

      M = modified
      A = added
      R = removed
      C = clean
      ! = missing (deleted by non-hg command, but still tracked)
      ? = not tracked
      I = ignored
        = origin of the previous file (with --copies)

    .. container:: verbose

      Examples:

      - show changes in the working directory relative to a
        changeset::

          hg status --rev 9353

      - show changes in the working directory relative to the
        current directory (see :hg:`help patterns` for more information)::

          hg status re:

      - show all changes including copies in an existing changeset::

          hg status --copies --change 9353

      - get a NUL separated list of added files, suitable for xargs::

          hg status -an0

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if pats:
        cwd = repo.getcwd()
    else:
        cwd = ''

    if opts.get('print0'):
        end = '\0'
    else:
        end = '\n'
    copy = {}
    states = 'modified added removed deleted unknown ignored clean'.split()
    show = [k for k in states if opts.get(k)]
    if opts.get('all'):
        show += ui.quiet and (states[:4] + ['clean']) or states
    if not show:
        if ui.quiet:
            show = states[:4]
        else:
            show = states[:5]

    m = scmutil.match(repo[node2], pats, opts)
    stat = repo.status(node1, node2, m,
                       'ignored' in show, 'clean' in show, 'unknown' in show,
                       opts.get('subrepos'))
    changestates = zip(states, 'MAR!?IC', stat)

    if (opts.get('all') or opts.get('copies')
        or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'):
        copy = copies.pathcopies(repo[node1], repo[node2], m)

    fm = ui.formatter('status', opts)
    fmt = '%s' + end
    showchar = not opts.get('no_status')

    for state, char, files in changestates:
        if state in show:
            label = 'status.' + state
            for f in files:
                fm.startitem()
                fm.condwrite(showchar, 'status', '%s ', char, label=label)
                fm.write('path', fmt, repo.pathto(f, cwd), label=label)
                if f in copy:
                    fm.write("copy", '  %s' + end, repo.pathto(copy[f], cwd),
                             label='status.copied')
    fm.end()

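# Illustrative usage sketch (not from the original source), building on the
# -an0 example in the docstring above; the xargs invocation is only one way to
# consume the NUL-separated output:
#
#   hg status -an0 | xargs -0 ls -ld   # NUL separation is safe for odd filenames
#   hg status -C                       # also show the source of copied files
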
5951 @command('^summary|sum',
5951 @command('^summary|sum',
5952 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5952 [('', 'remote', None, _('check for push and pull'))], '[--remote]')
5953 def summary(ui, repo, **opts):
5953 def summary(ui, repo, **opts):
5954 """summarize working directory state
5954 """summarize working directory state
5955
5955
5956 This generates a brief summary of the working directory state,
5956 This generates a brief summary of the working directory state,
5957 including parents, branch, commit status, phase and available updates.
5957 including parents, branch, commit status, phase and available updates.
5958
5958
5959 With the --remote option, this will check the default paths for
5959 With the --remote option, this will check the default paths for
5960 incoming and outgoing changes. This can be time-consuming.
5960 incoming and outgoing changes. This can be time-consuming.
5961
5961
5962 Returns 0 on success.
5962 Returns 0 on success.
5963 """
5963 """
5964
5964
5965 ctx = repo[None]
5965 ctx = repo[None]
5966 parents = ctx.parents()
5966 parents = ctx.parents()
5967 pnode = parents[0].node()
5967 pnode = parents[0].node()
5968 marks = []
5968 marks = []
5969
5969
5970 ms = None
5970 ms = None
5971 try:
5971 try:
5972 ms = mergemod.mergestate.read(repo)
5972 ms = mergemod.mergestate.read(repo)
5973 except error.UnsupportedMergeRecords as e:
5973 except error.UnsupportedMergeRecords as e:
5974 s = ' '.join(e.recordtypes)
5974 s = ' '.join(e.recordtypes)
5975 ui.warn(
5975 ui.warn(
5976 _('warning: merge state has unsupported record types: %s\n') % s)
5976 _('warning: merge state has unsupported record types: %s\n') % s)
5977 unresolved = 0
5977 unresolved = 0
5978 else:
5978 else:
5979 unresolved = [f for f in ms if ms[f] == 'u']
5979 unresolved = [f for f in ms if ms[f] == 'u']
5980
5980
5981 for p in parents:
5981 for p in parents:
5982 # label with log.changeset (instead of log.parent) since this
5982 # label with log.changeset (instead of log.parent) since this
5983 # shows a working directory parent *changeset*:
5983 # shows a working directory parent *changeset*:
5984 # i18n: column positioning for "hg summary"
5984 # i18n: column positioning for "hg summary"
5985 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5985 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
5986 label='log.changeset changeset.%s' % p.phasestr())
5986 label='log.changeset changeset.%s' % p.phasestr())
5987 ui.write(' '.join(p.tags()), label='log.tag')
5987 ui.write(' '.join(p.tags()), label='log.tag')
5988 if p.bookmarks():
5988 if p.bookmarks():
5989 marks.extend(p.bookmarks())
5989 marks.extend(p.bookmarks())
5990 if p.rev() == -1:
5990 if p.rev() == -1:
5991 if not len(repo):
5991 if not len(repo):
5992 ui.write(_(' (empty repository)'))
5992 ui.write(_(' (empty repository)'))
5993 else:
5993 else:
5994 ui.write(_(' (no revision checked out)'))
5994 ui.write(_(' (no revision checked out)'))
5995 ui.write('\n')
5995 ui.write('\n')
5996 if p.description():
5996 if p.description():
5997 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5997 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
5998 label='log.summary')
5998 label='log.summary')
5999
5999
6000 branch = ctx.branch()
6000 branch = ctx.branch()
6001 bheads = repo.branchheads(branch)
6001 bheads = repo.branchheads(branch)
6002 # i18n: column positioning for "hg summary"
6002 # i18n: column positioning for "hg summary"
6003 m = _('branch: %s\n') % branch
6003 m = _('branch: %s\n') % branch
6004 if branch != 'default':
6004 if branch != 'default':
6005 ui.write(m, label='log.branch')
6005 ui.write(m, label='log.branch')
6006 else:
6006 else:
6007 ui.status(m, label='log.branch')
6007 ui.status(m, label='log.branch')
6008
6008
6009 if marks:
6009 if marks:
6010 active = repo._activebookmark
6010 active = repo._activebookmark
6011 # i18n: column positioning for "hg summary"
6011 # i18n: column positioning for "hg summary"
6012 ui.write(_('bookmarks:'), label='log.bookmark')
6012 ui.write(_('bookmarks:'), label='log.bookmark')
6013 if active is not None:
6013 if active is not None:
6014 if active in marks:
6014 if active in marks:
6015 ui.write(' *' + active, label=activebookmarklabel)
6015 ui.write(' *' + active, label=activebookmarklabel)
6016 marks.remove(active)
6016 marks.remove(active)
6017 else:
6017 else:
6018 ui.write(' [%s]' % active, label=activebookmarklabel)
6018 ui.write(' [%s]' % active, label=activebookmarklabel)
6019 for m in marks:
6019 for m in marks:
6020 ui.write(' ' + m, label='log.bookmark')
6020 ui.write(' ' + m, label='log.bookmark')
6021 ui.write('\n', label='log.bookmark')
6021 ui.write('\n', label='log.bookmark')
6022
6022
6023 status = repo.status(unknown=True)
6023 status = repo.status(unknown=True)
6024
6024
6025 c = repo.dirstate.copies()
6025 c = repo.dirstate.copies()
6026 copied, renamed = [], []
6026 copied, renamed = [], []
6027 for d, s in c.iteritems():
6027 for d, s in c.iteritems():
6028 if s in status.removed:
6028 if s in status.removed:
6029 status.removed.remove(s)
6029 status.removed.remove(s)
6030 renamed.append(d)
6030 renamed.append(d)
6031 else:
6031 else:
6032 copied.append(d)
6032 copied.append(d)
6033 if d in status.added:
6033 if d in status.added:
6034 status.added.remove(d)
6034 status.added.remove(d)
6035
6035
6036 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6036 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
6037
6037
6038 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6038 labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified),
6039 (ui.label(_('%d added'), 'status.added'), status.added),
6039 (ui.label(_('%d added'), 'status.added'), status.added),
6040 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6040 (ui.label(_('%d removed'), 'status.removed'), status.removed),
6041 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6041 (ui.label(_('%d renamed'), 'status.copied'), renamed),
6042 (ui.label(_('%d copied'), 'status.copied'), copied),
6042 (ui.label(_('%d copied'), 'status.copied'), copied),
6043 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6043 (ui.label(_('%d deleted'), 'status.deleted'), status.deleted),
6044 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6044 (ui.label(_('%d unknown'), 'status.unknown'), status.unknown),
6045 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6045 (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved),
6046 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6046 (ui.label(_('%d subrepos'), 'status.modified'), subs)]
6047 t = []
6047 t = []
6048 for l, s in labels:
6048 for l, s in labels:
6049 if s:
6049 if s:
6050 t.append(l % len(s))
6050 t.append(l % len(s))
6051
6051
6052 t = ', '.join(t)
6052 t = ', '.join(t)
6053 cleanworkdir = False
6053 cleanworkdir = False
6054
6054
6055 if repo.vfs.exists('graftstate'):
6055 if repo.vfs.exists('graftstate'):
6056 t += _(' (graft in progress)')
6056 t += _(' (graft in progress)')
6057 if repo.vfs.exists('updatestate'):
6057 if repo.vfs.exists('updatestate'):
6058 t += _(' (interrupted update)')
6058 t += _(' (interrupted update)')
6059 elif len(parents) > 1:
6059 elif len(parents) > 1:
6060 t += _(' (merge)')
6060 t += _(' (merge)')
6061 elif branch != parents[0].branch():
6061 elif branch != parents[0].branch():
6062 t += _(' (new branch)')
6062 t += _(' (new branch)')
6063 elif (parents[0].closesbranch() and
6063 elif (parents[0].closesbranch() and
6064 pnode in repo.branchheads(branch, closed=True)):
6064 pnode in repo.branchheads(branch, closed=True)):
6065 t += _(' (head closed)')
6065 t += _(' (head closed)')
6066 elif not (status.modified or status.added or status.removed or renamed or
6066 elif not (status.modified or status.added or status.removed or renamed or
6067 copied or subs):
6067 copied or subs):
6068 t += _(' (clean)')
6068 t += _(' (clean)')
6069 cleanworkdir = True
6069 cleanworkdir = True
6070 elif pnode not in bheads:
6070 elif pnode not in bheads:
6071 t += _(' (new branch head)')
6071 t += _(' (new branch head)')
6072
6072
6073 if parents:
6073 if parents:
6074 pendingphase = max(p.phase() for p in parents)
6074 pendingphase = max(p.phase() for p in parents)
6075 else:
6075 else:
6076 pendingphase = phases.public
6076 pendingphase = phases.public
6077
6077
6078 if pendingphase > phases.newcommitphase(ui):
6078 if pendingphase > phases.newcommitphase(ui):
6079 t += ' (%s)' % phases.phasenames[pendingphase]
6079 t += ' (%s)' % phases.phasenames[pendingphase]
6080
6080
6081 if cleanworkdir:
6081 if cleanworkdir:
6082 # i18n: column positioning for "hg summary"
6082 # i18n: column positioning for "hg summary"
6083 ui.status(_('commit: %s\n') % t.strip())
6083 ui.status(_('commit: %s\n') % t.strip())
6084 else:
6084 else:
6085 # i18n: column positioning for "hg summary"
6085 # i18n: column positioning for "hg summary"
6086 ui.write(_('commit: %s\n') % t.strip())
6086 ui.write(_('commit: %s\n') % t.strip())
6087
6087
6088 # all ancestors of branch heads - all ancestors of parent = new csets
6088 # all ancestors of branch heads - all ancestors of parent = new csets
6089 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6089 new = len(repo.changelog.findmissing([pctx.node() for pctx in parents],
6090 bheads))
6090 bheads))
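# Editor's note: an illustrative, plain-set restatement of the comment above
# ("all ancestors of branch heads - all ancestors of parent = new csets") on a
# toy DAG; 'ancestors' and 'dag' are made-up names, not Mercurial APIs.
def ancestors(dag, nodes):
    # dag maps node -> list of parents; returns the nodes plus all ancestors
    seen, stack = set(), list(nodes)
    while stack:
        n = stack.pop()
        if n not in seen:
            seen.add(n)
            stack.extend(dag.get(n, []))
    return seen

dag = {'a': [], 'b': ['a'], 'c': ['b'], 'd': ['c']}
new = ancestors(dag, ['d']) - ancestors(dag, ['b'])
# with parent 'b' and branch head 'd', new == {'c', 'd'}: two new changesets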
6091
6091
6092 if new == 0:
6092 if new == 0:
6093 # i18n: column positioning for "hg summary"
6093 # i18n: column positioning for "hg summary"
6094 ui.status(_('update: (current)\n'))
6094 ui.status(_('update: (current)\n'))
6095 elif pnode not in bheads:
6095 elif pnode not in bheads:
6096 # i18n: column positioning for "hg summary"
6096 # i18n: column positioning for "hg summary"
6097 ui.write(_('update: %d new changesets (update)\n') % new)
6097 ui.write(_('update: %d new changesets (update)\n') % new)
6098 else:
6098 else:
6099 # i18n: column positioning for "hg summary"
6099 # i18n: column positioning for "hg summary"
6100 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6100 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
6101 (new, len(bheads)))
6101 (new, len(bheads)))
6102
6102
6103 t = []
6103 t = []
6104 draft = len(repo.revs('draft()'))
6104 draft = len(repo.revs('draft()'))
6105 if draft:
6105 if draft:
6106 t.append(_('%d draft') % draft)
6106 t.append(_('%d draft') % draft)
6107 secret = len(repo.revs('secret()'))
6107 secret = len(repo.revs('secret()'))
6108 if secret:
6108 if secret:
6109 t.append(_('%d secret') % secret)
6109 t.append(_('%d secret') % secret)
6110
6110
6111 if draft or secret:
6111 if draft or secret:
6112 ui.status(_('phases: %s\n') % ', '.join(t))
6112 ui.status(_('phases: %s\n') % ', '.join(t))
6113
6113
6114 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6114 if obsolete.isenabled(repo, obsolete.createmarkersopt):
6115 for trouble in ("unstable", "divergent", "bumped"):
6115 for trouble in ("unstable", "divergent", "bumped"):
6116 numtrouble = len(repo.revs(trouble + "()"))
6116 numtrouble = len(repo.revs(trouble + "()"))
6117 # We write all the possibilities to ease translation
6117 # We write all the possibilities to ease translation
6118 troublemsg = {
6118 troublemsg = {
6119 "unstable": _("unstable: %d changesets"),
6119 "unstable": _("unstable: %d changesets"),
6120 "divergent": _("divergent: %d changesets"),
6120 "divergent": _("divergent: %d changesets"),
6121 "bumped": _("bumped: %d changesets"),
6121 "bumped": _("bumped: %d changesets"),
6122 }
6122 }
6123 if numtrouble > 0:
6123 if numtrouble > 0:
6124 ui.status(troublemsg[trouble] % numtrouble + "\n")
6124 ui.status(troublemsg[trouble] % numtrouble + "\n")
6125
6125
6126 cmdutil.summaryhooks(ui, repo)
6126 cmdutil.summaryhooks(ui, repo)
6127
6127
6128 if opts.get('remote'):
6128 if opts.get('remote'):
6129 needsincoming, needsoutgoing = True, True
6129 needsincoming, needsoutgoing = True, True
6130 else:
6130 else:
6131 needsincoming, needsoutgoing = False, False
6131 needsincoming, needsoutgoing = False, False
6132 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6132 for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None):
6133 if i:
6133 if i:
6134 needsincoming = True
6134 needsincoming = True
6135 if o:
6135 if o:
6136 needsoutgoing = True
6136 needsoutgoing = True
6137 if not needsincoming and not needsoutgoing:
6137 if not needsincoming and not needsoutgoing:
6138 return
6138 return
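# Editor's note: a hedged sketch of the flag aggregation above -- each hook may
# request incoming and/or outgoing data, and one pass ORs those requests
# together; 'hookrequests' is an illustrative stand-in for the hook results.
def needs_remote(hookrequests, force=False):
    needsincoming = needsoutgoing = force
    for wants_in, wants_out in hookrequests:
        needsincoming = needsincoming or bool(wants_in)
        needsoutgoing = needsoutgoing or bool(wants_out)
    return needsincoming, needsoutgoing

# needs_remote([(False, True), (False, False)]) returns (False, True)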
6139
6139
6140 def getincoming():
6140 def getincoming():
6141 source, branches = hg.parseurl(ui.expandpath('default'))
6141 source, branches = hg.parseurl(ui.expandpath('default'))
6142 sbranch = branches[0]
6142 sbranch = branches[0]
6143 try:
6143 try:
6144 other = hg.peer(repo, {}, source)
6144 other = hg.peer(repo, {}, source)
6145 except error.RepoError:
6145 except error.RepoError:
6146 if opts.get('remote'):
6146 if opts.get('remote'):
6147 raise
6147 raise
6148 return source, sbranch, None, None, None
6148 return source, sbranch, None, None, None
6149 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6149 revs, checkout = hg.addbranchrevs(repo, other, branches, None)
6150 if revs:
6150 if revs:
6151 revs = [other.lookup(rev) for rev in revs]
6151 revs = [other.lookup(rev) for rev in revs]
6152 ui.debug('comparing with %s\n' % util.hidepassword(source))
6152 ui.debug('comparing with %s\n' % util.hidepassword(source))
6153 repo.ui.pushbuffer()
6153 repo.ui.pushbuffer()
6154 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6154 commoninc = discovery.findcommonincoming(repo, other, heads=revs)
6155 repo.ui.popbuffer()
6155 repo.ui.popbuffer()
6156 return source, sbranch, other, commoninc, commoninc[1]
6156 return source, sbranch, other, commoninc, commoninc[1]
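# Editor's note: an illustrative sketch (not the real peer API) of the error
# handling used by getincoming()/getoutgoing() above: a failed connection only
# propagates when the user explicitly asked for remote information.
def probe_remote(connect, url, explicit_remote):
    try:
        return connect(url)
    except IOError:
        if explicit_remote:
            raise
        return None   # degrade silently when remote data was optional

# probe_remote(lambda url: open(url), '/no/such/path', explicit_remote=False)
# returns None instead of raising.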
6157
6157
6158 if needsincoming:
6158 if needsincoming:
6159 source, sbranch, sother, commoninc, incoming = getincoming()
6159 source, sbranch, sother, commoninc, incoming = getincoming()
6160 else:
6160 else:
6161 source = sbranch = sother = commoninc = incoming = None
6161 source = sbranch = sother = commoninc = incoming = None
6162
6162
6163 def getoutgoing():
6163 def getoutgoing():
6164 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6164 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
6165 dbranch = branches[0]
6165 dbranch = branches[0]
6166 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6166 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
6167 if source != dest:
6167 if source != dest:
6168 try:
6168 try:
6169 dother = hg.peer(repo, {}, dest)
6169 dother = hg.peer(repo, {}, dest)
6170 except error.RepoError:
6170 except error.RepoError:
6171 if opts.get('remote'):
6171 if opts.get('remote'):
6172 raise
6172 raise
6173 return dest, dbranch, None, None
6173 return dest, dbranch, None, None
6174 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6174 ui.debug('comparing with %s\n' % util.hidepassword(dest))
6175 elif sother is None:
6175 elif sother is None:
6176 # there is no explicit destination peer, but the source one is invalid
6176 # there is no explicit destination peer, but the source one is invalid
6177 return dest, dbranch, None, None
6177 return dest, dbranch, None, None
6178 else:
6178 else:
6179 dother = sother
6179 dother = sother
6180 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6180 if (source != dest or (sbranch is not None and sbranch != dbranch)):
6181 common = None
6181 common = None
6182 else:
6182 else:
6183 common = commoninc
6183 common = commoninc
6184 if revs:
6184 if revs:
6185 revs = [repo.lookup(rev) for rev in revs]
6185 revs = [repo.lookup(rev) for rev in revs]
6186 repo.ui.pushbuffer()
6186 repo.ui.pushbuffer()
6187 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6187 outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs,
6188 commoninc=common)
6188 commoninc=common)
6189 repo.ui.popbuffer()
6189 repo.ui.popbuffer()
6190 return dest, dbranch, dother, outgoing
6190 return dest, dbranch, dother, outgoing
6191
6191
6192 if needsoutgoing:
6192 if needsoutgoing:
6193 dest, dbranch, dother, outgoing = getoutgoing()
6193 dest, dbranch, dother, outgoing = getoutgoing()
6194 else:
6194 else:
6195 dest = dbranch = dother = outgoing = None
6195 dest = dbranch = dother = outgoing = None
6196
6196
6197 if opts.get('remote'):
6197 if opts.get('remote'):
6198 t = []
6198 t = []
6199 if incoming:
6199 if incoming:
6200 t.append(_('1 or more incoming'))
6200 t.append(_('1 or more incoming'))
6201 o = outgoing.missing
6201 o = outgoing.missing
6202 if o:
6202 if o:
6203 t.append(_('%d outgoing') % len(o))
6203 t.append(_('%d outgoing') % len(o))
6204 other = dother or sother
6204 other = dother or sother
6205 if 'bookmarks' in other.listkeys('namespaces'):
6205 if 'bookmarks' in other.listkeys('namespaces'):
6206 counts = bookmarks.summary(repo, other)
6206 counts = bookmarks.summary(repo, other)
6207 if counts[0] > 0:
6207 if counts[0] > 0:
6208 t.append(_('%d incoming bookmarks') % counts[0])
6208 t.append(_('%d incoming bookmarks') % counts[0])
6209 if counts[1] > 0:
6209 if counts[1] > 0:
6210 t.append(_('%d outgoing bookmarks') % counts[1])
6210 t.append(_('%d outgoing bookmarks') % counts[1])
6211
6211
6212 if t:
6212 if t:
6213 # i18n: column positioning for "hg summary"
6213 # i18n: column positioning for "hg summary"
6214 ui.write(_('remote: %s\n') % (', '.join(t)))
6214 ui.write(_('remote: %s\n') % (', '.join(t)))
6215 else:
6215 else:
6216 # i18n: column positioning for "hg summary"
6216 # i18n: column positioning for "hg summary"
6217 ui.status(_('remote: (synced)\n'))
6217 ui.status(_('remote: (synced)\n'))
6218
6218
6219 cmdutil.summaryremotehooks(ui, repo, opts,
6219 cmdutil.summaryremotehooks(ui, repo, opts,
6220 ((source, sbranch, sother, commoninc),
6220 ((source, sbranch, sother, commoninc),
6221 (dest, dbranch, dother, outgoing)))
6221 (dest, dbranch, dother, outgoing)))
6222
6222
6223 @command('tag',
6223 @command('tag',
6224 [('f', 'force', None, _('force tag')),
6224 [('f', 'force', None, _('force tag')),
6225 ('l', 'local', None, _('make the tag local')),
6225 ('l', 'local', None, _('make the tag local')),
6226 ('r', 'rev', '', _('revision to tag'), _('REV')),
6226 ('r', 'rev', '', _('revision to tag'), _('REV')),
6227 ('', 'remove', None, _('remove a tag')),
6227 ('', 'remove', None, _('remove a tag')),
6228 # -l/--local is already there, commitopts cannot be used
6228 # -l/--local is already there, commitopts cannot be used
6229 ('e', 'edit', None, _('invoke editor on commit messages')),
6229 ('e', 'edit', None, _('invoke editor on commit messages')),
6230 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6230 ('m', 'message', '', _('use text as commit message'), _('TEXT')),
6231 ] + commitopts2,
6231 ] + commitopts2,
6232 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6232 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...'))
6233 def tag(ui, repo, name1, *names, **opts):
6233 def tag(ui, repo, name1, *names, **opts):
6234 """add one or more tags for the current or given revision
6234 """add one or more tags for the current or given revision
6235
6235
6236 Name a particular revision using <name>.
6236 Name a particular revision using <name>.
6237
6237
6238 Tags are used to name particular revisions of the repository and are
6238 Tags are used to name particular revisions of the repository and are
6239 very useful to compare different revisions, to go back to significant
6239 very useful to compare different revisions, to go back to significant
6240 earlier versions or to mark branch points as releases, etc. Changing
6240 earlier versions or to mark branch points as releases, etc. Changing
6241 an existing tag is normally disallowed; use -f/--force to override.
6241 an existing tag is normally disallowed; use -f/--force to override.
6242
6242
6243 If no revision is given, the parent of the working directory is
6243 If no revision is given, the parent of the working directory is
6244 used.
6244 used.
6245
6245
6246 To facilitate version control, distribution, and merging of tags,
6246 To facilitate version control, distribution, and merging of tags,
6247 they are stored as a file named ".hgtags" which is managed similarly
6247 they are stored as a file named ".hgtags" which is managed similarly
6248 to other project files and can be hand-edited if necessary. This
6248 to other project files and can be hand-edited if necessary. This
6249 also means that tagging creates a new commit. The file
6249 also means that tagging creates a new commit. The file
6250 ".hg/localtags" is used for local tags (not shared among
6250 ".hg/localtags" is used for local tags (not shared among
6251 repositories).
6251 repositories).
6252
6252
6253 Tag commits are usually made at the head of a branch. If the parent
6253 Tag commits are usually made at the head of a branch. If the parent
6254 of the working directory is not a branch head, :hg:`tag` aborts; use
6254 of the working directory is not a branch head, :hg:`tag` aborts; use
6255 -f/--force to force the tag commit to be based on a non-head
6255 -f/--force to force the tag commit to be based on a non-head
6256 changeset.
6256 changeset.
6257
6257
6258 See :hg:`help dates` for a list of formats valid for -d/--date.
6258 See :hg:`help dates` for a list of formats valid for -d/--date.
6259
6259
6260 Since tag names have priority over branch names during revision
6260 Since tag names have priority over branch names during revision
6261 lookup, using an existing branch name as a tag name is discouraged.
6261 lookup, using an existing branch name as a tag name is discouraged.
6262
6262
6263 Returns 0 on success.
6263 Returns 0 on success.
6264 """
6264 """
6265 wlock = lock = None
6265 wlock = lock = None
6266 try:
6266 try:
6267 wlock = repo.wlock()
6267 wlock = repo.wlock()
6268 lock = repo.lock()
6268 lock = repo.lock()
6269 rev_ = "."
6269 rev_ = "."
6270 names = [t.strip() for t in (name1,) + names]
6270 names = [t.strip() for t in (name1,) + names]
6271 if len(names) != len(set(names)):
6271 if len(names) != len(set(names)):
6272 raise error.Abort(_('tag names must be unique'))
6272 raise error.Abort(_('tag names must be unique'))
6273 for n in names:
6273 for n in names:
6274 scmutil.checknewlabel(repo, n, 'tag')
6274 scmutil.checknewlabel(repo, n, 'tag')
6275 if not n:
6275 if not n:
6276 raise error.Abort(_('tag names cannot consist entirely of '
6276 raise error.Abort(_('tag names cannot consist entirely of '
6277 'whitespace'))
6277 'whitespace'))
6278 if opts.get('rev') and opts.get('remove'):
6278 if opts.get('rev') and opts.get('remove'):
6279 raise error.Abort(_("--rev and --remove are incompatible"))
6279 raise error.Abort(_("--rev and --remove are incompatible"))
6280 if opts.get('rev'):
6280 if opts.get('rev'):
6281 rev_ = opts['rev']
6281 rev_ = opts['rev']
6282 message = opts.get('message')
6282 message = opts.get('message')
6283 if opts.get('remove'):
6283 if opts.get('remove'):
6284 if opts.get('local'):
6284 if opts.get('local'):
6285 expectedtype = 'local'
6285 expectedtype = 'local'
6286 else:
6286 else:
6287 expectedtype = 'global'
6287 expectedtype = 'global'
6288
6288
6289 for n in names:
6289 for n in names:
6290 if not repo.tagtype(n):
6290 if not repo.tagtype(n):
6291 raise error.Abort(_("tag '%s' does not exist") % n)
6291 raise error.Abort(_("tag '%s' does not exist") % n)
6292 if repo.tagtype(n) != expectedtype:
6292 if repo.tagtype(n) != expectedtype:
6293 if expectedtype == 'global':
6293 if expectedtype == 'global':
6294 raise error.Abort(_("tag '%s' is not a global tag") % n)
6294 raise error.Abort(_("tag '%s' is not a global tag") % n)
6295 else:
6295 else:
6296 raise error.Abort(_("tag '%s' is not a local tag") % n)
6296 raise error.Abort(_("tag '%s' is not a local tag") % n)
6297 rev_ = 'null'
6297 rev_ = 'null'
6298 if not message:
6298 if not message:
6299 # we don't translate commit messages
6299 # we don't translate commit messages
6300 message = 'Removed tag %s' % ', '.join(names)
6300 message = 'Removed tag %s' % ', '.join(names)
6301 elif not opts.get('force'):
6301 elif not opts.get('force'):
6302 for n in names:
6302 for n in names:
6303 if n in repo.tags():
6303 if n in repo.tags():
6304 raise error.Abort(_("tag '%s' already exists "
6304 raise error.Abort(_("tag '%s' already exists "
6305 "(use -f to force)") % n)
6305 "(use -f to force)") % n)
6306 if not opts.get('local'):
6306 if not opts.get('local'):
6307 p1, p2 = repo.dirstate.parents()
6307 p1, p2 = repo.dirstate.parents()
6308 if p2 != nullid:
6308 if p2 != nullid:
6309 raise error.Abort(_('uncommitted merge'))
6309 raise error.Abort(_('uncommitted merge'))
6310 bheads = repo.branchheads()
6310 bheads = repo.branchheads()
6311 if not opts.get('force') and bheads and p1 not in bheads:
6311 if not opts.get('force') and bheads and p1 not in bheads:
6312 raise error.Abort(_('working directory is not at a branch head '
6312 raise error.Abort(_('working directory is not at a branch head '
6313 '(use -f to force)'))
6313 '(use -f to force)'))
6314 r = scmutil.revsingle(repo, rev_).node()
6314 r = scmutil.revsingle(repo, rev_).node()
6315
6315
6316 if not message:
6316 if not message:
6317 # we don't translate commit messages
6317 # we don't translate commit messages
6318 message = ('Added tag %s for changeset %s' %
6318 message = ('Added tag %s for changeset %s' %
6319 (', '.join(names), short(r)))
6319 (', '.join(names), short(r)))
6320
6320
6321 date = opts.get('date')
6321 date = opts.get('date')
6322 if date:
6322 if date:
6323 date = util.parsedate(date)
6323 date = util.parsedate(date)
6324
6324
6325 if opts.get('remove'):
6325 if opts.get('remove'):
6326 editform = 'tag.remove'
6326 editform = 'tag.remove'
6327 else:
6327 else:
6328 editform = 'tag.add'
6328 editform = 'tag.add'
6329 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6329 editor = cmdutil.getcommiteditor(editform=editform, **opts)
6330
6330
6331 # don't allow tagging the null rev
6331 # don't allow tagging the null rev
6332 if (not opts.get('remove') and
6332 if (not opts.get('remove') and
6333 scmutil.revsingle(repo, rev_).rev() == nullrev):
6333 scmutil.revsingle(repo, rev_).rev() == nullrev):
6334 raise error.Abort(_("cannot tag null revision"))
6334 raise error.Abort(_("cannot tag null revision"))
6335
6335
6336 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6336 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date,
6337 editor=editor)
6337 editor=editor)
6338 finally:
6338 finally:
6339 release(lock, wlock)
6339 release(lock, wlock)
6340
6340
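# Editor's note: a generic illustration of the lock discipline in tag() above --
# take the working-directory lock before the store lock and release them in
# reverse order even if the body raises; threading.Lock merely stands in for
# the repository locks.
import threading

_wlock, _slock = threading.Lock(), threading.Lock()

def with_repo_locks(body):
    _wlock.acquire()
    try:
        _slock.acquire()
        try:
            return body()
        finally:
            _slock.release()
    finally:
        _wlock.release()

# with_repo_locks(lambda: 'tagged') returns 'tagged' with both locks released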
6341 @command('tags', formatteropts, '')
6341 @command('tags', formatteropts, '')
6342 def tags(ui, repo, **opts):
6342 def tags(ui, repo, **opts):
6343 """list repository tags
6343 """list repository tags
6344
6344
6345 This lists both regular and local tags. When the -v/--verbose
6345 This lists both regular and local tags. When the -v/--verbose
6346 switch is used, a third column "local" is printed for local tags.
6346 switch is used, a third column "local" is printed for local tags.
6347 When the -q/--quiet switch is used, only the tag name is printed.
6347 When the -q/--quiet switch is used, only the tag name is printed.
6348
6348
6349 Returns 0 on success.
6349 Returns 0 on success.
6350 """
6350 """
6351
6351
6352 fm = ui.formatter('tags', opts)
6352 fm = ui.formatter('tags', opts)
6353 hexfunc = fm.hexfunc
6353 hexfunc = fm.hexfunc
6354 tagtype = ""
6354 tagtype = ""
6355
6355
6356 for t, n in reversed(repo.tagslist()):
6356 for t, n in reversed(repo.tagslist()):
6357 hn = hexfunc(n)
6357 hn = hexfunc(n)
6358 label = 'tags.normal'
6358 label = 'tags.normal'
6359 tagtype = ''
6359 tagtype = ''
6360 if repo.tagtype(t) == 'local':
6360 if repo.tagtype(t) == 'local':
6361 label = 'tags.local'
6361 label = 'tags.local'
6362 tagtype = 'local'
6362 tagtype = 'local'
6363
6363
6364 fm.startitem()
6364 fm.startitem()
6365 fm.write('tag', '%s', t, label=label)
6365 fm.write('tag', '%s', t, label=label)
6366 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6366 fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s'
6367 fm.condwrite(not ui.quiet, 'rev node', fmt,
6367 fm.condwrite(not ui.quiet, 'rev node', fmt,
6368 repo.changelog.rev(n), hn, label=label)
6368 repo.changelog.rev(n), hn, label=label)
6369 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6369 fm.condwrite(ui.verbose and tagtype, 'type', ' %s',
6370 tagtype, label=label)
6370 tagtype, label=label)
6371 fm.plain('\n')
6371 fm.plain('\n')
6372 fm.end()
6372 fm.end()
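# Editor's note: a toy illustration of the column alignment used above; len()
# stands in for encoding.colwidth(), which measures display width rather than
# byte length, and max(0, ...) avoids a negative pad for very long names.
def tagline(name, rev, node, width=30):
    pad = ' ' * max(0, width - len(name))
    return '%s%s %5d:%s' % (name, pad, rev, node)

# tagline('tip', 42, '10b17ed9') pads 'tip' to 30 columns, then appends the
# right-aligned rev and the node hash.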
6373
6373
6374 @command('tip',
6374 @command('tip',
6375 [('p', 'patch', None, _('show patch')),
6375 [('p', 'patch', None, _('show patch')),
6376 ('g', 'git', None, _('use git extended diff format')),
6376 ('g', 'git', None, _('use git extended diff format')),
6377 ] + templateopts,
6377 ] + templateopts,
6378 _('[-p] [-g]'))
6378 _('[-p] [-g]'))
6379 def tip(ui, repo, **opts):
6379 def tip(ui, repo, **opts):
6380 """show the tip revision (DEPRECATED)
6380 """show the tip revision (DEPRECATED)
6381
6381
6382 The tip revision (usually just called the tip) is the changeset
6382 The tip revision (usually just called the tip) is the changeset
6383 most recently added to the repository (and therefore the most
6383 most recently added to the repository (and therefore the most
6384 recently changed head).
6384 recently changed head).
6385
6385
6386 If you have just made a commit, that commit will be the tip. If
6386 If you have just made a commit, that commit will be the tip. If
6387 you have just pulled changes from another repository, the tip of
6387 you have just pulled changes from another repository, the tip of
6388 that repository becomes the current tip. The "tip" tag is special
6388 that repository becomes the current tip. The "tip" tag is special
6389 and cannot be renamed or assigned to a different changeset.
6389 and cannot be renamed or assigned to a different changeset.
6390
6390
6391 This command is deprecated, please use :hg:`heads` instead.
6391 This command is deprecated, please use :hg:`heads` instead.
6392
6392
6393 Returns 0 on success.
6393 Returns 0 on success.
6394 """
6394 """
6395 displayer = cmdutil.show_changeset(ui, repo, opts)
6395 displayer = cmdutil.show_changeset(ui, repo, opts)
6396 displayer.show(repo['tip'])
6396 displayer.show(repo['tip'])
6397 displayer.close()
6397 displayer.close()
6398
6398
6399 @command('unbundle',
6399 @command('unbundle',
6400 [('u', 'update', None,
6400 [('u', 'update', None,
6401 _('update to new branch head if changesets were unbundled'))],
6401 _('update to new branch head if changesets were unbundled'))],
6402 _('[-u] FILE...'))
6402 _('[-u] FILE...'))
6403 def unbundle(ui, repo, fname1, *fnames, **opts):
6403 def unbundle(ui, repo, fname1, *fnames, **opts):
6404 """apply one or more changegroup files
6404 """apply one or more changegroup files
6405
6405
6406 Apply one or more compressed changegroup files generated by the
6406 Apply one or more compressed changegroup files generated by the
6407 bundle command.
6407 bundle command.
6408
6408
6409 Returns 0 on success, 1 if an update has unresolved files.
6409 Returns 0 on success, 1 if an update has unresolved files.
6410 """
6410 """
6411 fnames = (fname1,) + fnames
6411 fnames = (fname1,) + fnames
6412
6412
6413 with repo.lock():
6413 with repo.lock():
6414 for fname in fnames:
6414 for fname in fnames:
6415 f = hg.openpath(ui, fname)
6415 f = hg.openpath(ui, fname)
6416 gen = exchange.readbundle(ui, f, fname)
6416 gen = exchange.readbundle(ui, f, fname)
6417 if isinstance(gen, bundle2.unbundle20):
6417 if isinstance(gen, bundle2.unbundle20):
6418 tr = repo.transaction('unbundle')
6418 tr = repo.transaction('unbundle')
6419 try:
6419 try:
6420 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
6420 op = bundle2.applybundle(repo, gen, tr, source='unbundle',
6421 url='bundle:' + fname)
6421 url='bundle:' + fname)
6422 tr.close()
6422 tr.close()
6423 except error.BundleUnknownFeatureError as exc:
6423 except error.BundleUnknownFeatureError as exc:
6424 raise error.Abort(_('%s: unknown bundle feature, %s')
6424 raise error.Abort(_('%s: unknown bundle feature, %s')
6425 % (fname, exc),
6425 % (fname, exc),
6426 hint=_("see https://mercurial-scm.org/"
6426 hint=_("see https://mercurial-scm.org/"
6427 "wiki/BundleFeature for more "
6427 "wiki/BundleFeature for more "
6428 "information"))
6428 "information"))
6429 finally:
6429 finally:
6430 if tr:
6430 if tr:
6431 tr.release()
6431 tr.release()
6432 changes = [r.get('return', 0)
6432 changes = [r.get('return', 0)
6433 for r in op.records['changegroup']]
6433 for r in op.records['changegroup']]
6434 modheads = changegroup.combineresults(changes)
6434 modheads = changegroup.combineresults(changes)
6435 elif isinstance(gen, streamclone.streamcloneapplier):
6435 elif isinstance(gen, streamclone.streamcloneapplier):
6436 raise error.Abort(
6436 raise error.Abort(
6437 _('packed bundles cannot be applied with '
6437 _('packed bundles cannot be applied with '
6438 '"hg unbundle"'),
6438 '"hg unbundle"'),
6439 hint=_('use "hg debugapplystreamclonebundle"'))
6439 hint=_('use "hg debugapplystreamclonebundle"'))
6440 else:
6440 else:
6441 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
6441 modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname)
6442
6442
6443 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
6443 return postincoming(ui, repo, modheads, opts.get('update'), None, None)
6444
6444
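# Editor's note: a self-contained sketch of the transaction discipline in
# unbundle() above -- close() commits only when the body succeeds, and
# release() in 'finally' rolls back anything left open; the Transaction class
# here is hypothetical, not Mercurial's.
class Transaction(object):
    def __init__(self):
        self.committed = False
    def close(self):
        self.committed = True
    def release(self):
        if not self.committed:
            pass   # a real transaction would roll back its journal here

def apply_with_transaction(apply):
    tr = Transaction()
    try:
        result = apply(tr)
        tr.close()
        return result
    finally:
        tr.release()

# apply_with_transaction(lambda tr: 'ok') returns 'ok'; if apply() raises, the
# transaction is released (rolled back) before the exception propagates.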
6445 @command('^update|up|checkout|co',
6445 @command('^update|up|checkout|co',
6446 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6446 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
6447 ('c', 'check', None, _('require clean working directory')),
6447 ('c', 'check', None, _('require clean working directory')),
6448 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6448 ('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
6449 ('r', 'rev', '', _('revision'), _('REV'))
6449 ('r', 'rev', '', _('revision'), _('REV'))
6450 ] + mergetoolopts,
6450 ] + mergetoolopts,
6451 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6451 _('[-c] [-C] [-d DATE] [[-r] REV]'))
6452 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6452 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False,
6453 tool=None):
6453 tool=None):
6454 """update working directory (or switch revisions)
6454 """update working directory (or switch revisions)
6455
6455
6456 Update the repository's working directory to the specified
6456 Update the repository's working directory to the specified
6457 changeset. If no changeset is specified, update to the tip of the
6457 changeset. If no changeset is specified, update to the tip of the
6458 current named branch and move the active bookmark (see :hg:`help
6458 current named branch and move the active bookmark (see :hg:`help
6459 bookmarks`).
6459 bookmarks`).
6460
6460
6461 Update sets the working directory's parent revision to the specified
6461 Update sets the working directory's parent revision to the specified
6462 changeset (see :hg:`help parents`).
6462 changeset (see :hg:`help parents`).
6463
6463
6464 If the changeset is not a descendant or ancestor of the working
6464 If the changeset is not a descendant or ancestor of the working
6465 directory's parent, the update is aborted. With the -c/--check
6465 directory's parent, the update is aborted. With the -c/--check
6466 option, the working directory is checked for uncommitted changes; if
6466 option, the working directory is checked for uncommitted changes; if
6467 none are found, the working directory is updated to the specified
6467 none are found, the working directory is updated to the specified
6468 changeset.
6468 changeset.
6469
6469
6470 .. container:: verbose
6470 .. container:: verbose
6471
6471
6472 The following rules apply when the working directory contains
6472 The following rules apply when the working directory contains
6473 uncommitted changes:
6473 uncommitted changes:
6474
6474
6475 1. If neither -c/--check nor -C/--clean is specified, and if
6475 1. If neither -c/--check nor -C/--clean is specified, and if
6476 the requested changeset is an ancestor or descendant of
6476 the requested changeset is an ancestor or descendant of
6477 the working directory's parent, the uncommitted changes
6477 the working directory's parent, the uncommitted changes
6478 are merged into the requested changeset and the merged
6478 are merged into the requested changeset and the merged
6479 result is left uncommitted. If the requested changeset is
6479 result is left uncommitted. If the requested changeset is
6480 not an ancestor or descendant (that is, it is on another
6480 not an ancestor or descendant (that is, it is on another
6481 branch), the update is aborted and the uncommitted changes
6481 branch), the update is aborted and the uncommitted changes
6482 are preserved.
6482 are preserved.
6483
6483
6484 2. With the -c/--check option, the update is aborted and the
6484 2. With the -c/--check option, the update is aborted and the
6485 uncommitted changes are preserved.
6485 uncommitted changes are preserved.
6486
6486
6487 3. With the -C/--clean option, uncommitted changes are discarded and
6487 3. With the -C/--clean option, uncommitted changes are discarded and
6488 the working directory is updated to the requested changeset.
6488 the working directory is updated to the requested changeset.
6489
6489
6490 To cancel an uncommitted merge (and lose your changes), use
6490 To cancel an uncommitted merge (and lose your changes), use
6491 :hg:`update --clean .`.
6491 :hg:`update --clean .`.
6492
6492
6493 Use null as the changeset to remove the working directory (like
6493 Use null as the changeset to remove the working directory (like
6494 :hg:`clone -U`).
6494 :hg:`clone -U`).
6495
6495
6496 If you want to revert just one file to an older revision, use
6496 If you want to revert just one file to an older revision, use
6497 :hg:`revert [-r REV] NAME`.
6497 :hg:`revert [-r REV] NAME`.
6498
6498
6499 See :hg:`help dates` for a list of formats valid for -d/--date.
6499 See :hg:`help dates` for a list of formats valid for -d/--date.
6500
6500
6501 Returns 0 on success, 1 if there are unresolved files.
6501 Returns 0 on success, 1 if there are unresolved files.
6502 """
6502 """
6503 if rev and node:
6503 if rev and node:
6504 raise error.Abort(_("please specify just one revision"))
6504 raise error.Abort(_("please specify just one revision"))
6505
6505
6506 if rev is None or rev == '':
6506 if rev is None or rev == '':
6507 rev = node
6507 rev = node
6508
6508
6509 if date and rev is not None:
6509 if date and rev is not None:
6510 raise error.Abort(_("you can't specify a revision and a date"))
6510 raise error.Abort(_("you can't specify a revision and a date"))
6511
6511
6512 if check and clean:
6512 if check and clean:
6513 raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))
6513 raise error.Abort(_("cannot specify both -c/--check and -C/--clean"))
6514
6514
6515 with repo.wlock():
6515 with repo.wlock():
6516 cmdutil.clearunfinished(repo)
6516 cmdutil.clearunfinished(repo)
6517
6517
6518 if date:
6518 if date:
6519 rev = cmdutil.finddate(ui, repo, date)
6519 rev = cmdutil.finddate(ui, repo, date)
6520
6520
6521 # if we defined a bookmark, we have to remember the original name
6521 # if we defined a bookmark, we have to remember the original name
6522 brev = rev
6522 brev = rev
6523 rev = scmutil.revsingle(repo, rev, rev).rev()
6523 rev = scmutil.revsingle(repo, rev, rev).rev()
6524
6524
6525 if check:
6525 if check:
6526 cmdutil.bailifchanged(repo, merge=False)
6526 cmdutil.bailifchanged(repo, merge=False)
6527
6527
6528 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6528 repo.ui.setconfig('ui', 'forcemerge', tool, 'update')
6529
6529
6530 return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
6530 return hg.updatetotally(ui, repo, rev, brev, clean=clean, check=check)
6531
6531
6532 @command('verify', [])
6532 @command('verify', [])
6533 def verify(ui, repo):
6533 def verify(ui, repo):
6534 """verify the integrity of the repository
6534 """verify the integrity of the repository
6535
6535
6536 Verify the integrity of the current repository.
6536 Verify the integrity of the current repository.
6537
6537
6538 This will perform an extensive check of the repository's
6538 This will perform an extensive check of the repository's
6539 integrity, validating the hashes and checksums of each entry in
6539 integrity, validating the hashes and checksums of each entry in
6540 the changelog, manifest, and tracked files, as well as the
6540 the changelog, manifest, and tracked files, as well as the
6541 integrity of their crosslinks and indices.
6541 integrity of their crosslinks and indices.
6542
6542
6543 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
6543 Please see https://mercurial-scm.org/wiki/RepositoryCorruption
6544 for more information about recovery from corruption of the
6544 for more information about recovery from corruption of the
6545 repository.
6545 repository.
6546
6546
6547 Returns 0 on success, 1 if errors are encountered.
6547 Returns 0 on success, 1 if errors are encountered.
6548 """
6548 """
6549 return hg.verify(repo)
6549 return hg.verify(repo)
6550
6550
6551 @command('version', [] + formatteropts, norepo=True)
6551 @command('version', [] + formatteropts, norepo=True)
6552 def version_(ui, **opts):
6552 def version_(ui, **opts):
6553 """output version and copyright information"""
6553 """output version and copyright information"""
6554 fm = ui.formatter("version", opts)
6554 fm = ui.formatter("version", opts)
6555 fm.startitem()
6555 fm.startitem()
6556 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
6556 fm.write("ver", _("Mercurial Distributed SCM (version %s)\n"),
6557 util.version())
6557 util.version())
6558 license = _(
6558 license = _(
6559 "(see https://mercurial-scm.org for more information)\n"
6559 "(see https://mercurial-scm.org for more information)\n"
6560 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
6560 "\nCopyright (C) 2005-2016 Matt Mackall and others\n"
6561 "This is free software; see the source for copying conditions. "
6561 "This is free software; see the source for copying conditions. "
6562 "There is NO\nwarranty; "
6562 "There is NO\nwarranty; "
6563 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6563 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
6564 )
6564 )
6565 if not ui.quiet:
6565 if not ui.quiet:
6566 fm.plain(license)
6566 fm.plain(license)
6567
6567
6568 if ui.verbose:
6568 if ui.verbose:
6569 fm.plain(_("\nEnabled extensions:\n\n"))
6569 fm.plain(_("\nEnabled extensions:\n\n"))
6570 # format names and versions into columns
6570 # format names and versions into columns
6571 names = []
6571 names = []
6572 vers = []
6572 vers = []
6573 isinternals = []
6573 isinternals = []
6574 for name, module in extensions.extensions():
6574 for name, module in extensions.extensions():
6575 names.append(name)
6575 names.append(name)
6576 vers.append(extensions.moduleversion(module) or None)
6576 vers.append(extensions.moduleversion(module) or None)
6577 isinternals.append(extensions.ismoduleinternal(module))
6577 isinternals.append(extensions.ismoduleinternal(module))
6578 fn = fm.nested("extensions")
6578 fn = fm.nested("extensions")
6579 if names:
6579 if names:
6580 namefmt = " %%-%ds " % max(len(n) for n in names)
6580 namefmt = " %%-%ds " % max(len(n) for n in names)
6581 places = [_("external"), _("internal")]
6581 places = [_("external"), _("internal")]
6582 for n, v, p in zip(names, vers, isinternals):
6582 for n, v, p in zip(names, vers, isinternals):
6583 fn.startitem()
6583 fn.startitem()
6584 fn.condwrite(ui.verbose, "name", namefmt, n)
6584 fn.condwrite(ui.verbose, "name", namefmt, n)
6585 if ui.verbose:
6585 if ui.verbose:
6586 fn.plain("%s " % places[p])
6586 fn.plain("%s " % places[p])
6587 fn.data(bundled=p)
6587 fn.data(bundled=p)
6588 fn.condwrite(ui.verbose and v, "ver", "%s", v)
6588 fn.condwrite(ui.verbose and v, "ver", "%s", v)
6589 if ui.verbose:
6589 if ui.verbose:
6590 fn.plain("\n")
6590 fn.plain("\n")
6591 fn.end()
6591 fn.end()
6592 fm.end()
6592 fm.end()
6593
6593
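# Editor's note: a tiny illustration of the dynamic column-width trick used
# above for the extension list -- build a %-style format whose field width is
# the longest name, then left-justify every row to it.
names = ['rebase', 'mq', 'color']
namefmt = ' %%-%ds ' % max(len(n) for n in names)   # yields ' %-6s '
rows = [namefmt % n for n in names]
# every row now occupies the same number of columns, so versions line up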
6594 def loadcmdtable(ui, name, cmdtable):
6594 def loadcmdtable(ui, name, cmdtable):
6595 """Load command functions from specified cmdtable
6595 """Load command functions from specified cmdtable
6596 """
6596 """
6597 overrides = [cmd for cmd in cmdtable if cmd in table]
6597 overrides = [cmd for cmd in cmdtable if cmd in table]
6598 if overrides:
6598 if overrides:
6599 ui.warn(_("extension '%s' overrides commands: %s\n")
6599 ui.warn(_("extension '%s' overrides commands: %s\n")
6600 % (name, " ".join(overrides)))
6600 % (name, " ".join(overrides)))
6601 table.update(cmdtable)
6601 table.update(cmdtable)
@@ -1,109 +1,109
1 # i18n.py - internationalization support for mercurial
1 # i18n.py - internationalization support for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import gettext as gettextmod
10 import gettext as gettextmod
11 import locale
11 import locale
12 import os
12 import os
13 import sys
13 import sys
14
14
15 from . import (
15 from . import (
16 encoding,
16 encoding,
17 pycompat,
17 pycompat,
18 )
18 )
19
19
20 # modelled after templater.templatepath:
20 # modelled after templater.templatepath:
21 if getattr(sys, 'frozen', None) is not None:
21 if getattr(sys, 'frozen', None) is not None:
22 module = sys.executable
22 module = pycompat.sysexecutable
23 else:
23 else:
24 module = __file__
24 module = __file__
25
25
26 try:
26 try:
27 unicode
27 unicode
28 except NameError:
28 except NameError:
29 unicode = str
29 unicode = str
30
30
31 _languages = None
31 _languages = None
32 if (pycompat.osname == 'nt'
32 if (pycompat.osname == 'nt'
33 and 'LANGUAGE' not in encoding.environ
33 and 'LANGUAGE' not in encoding.environ
34 and 'LC_ALL' not in encoding.environ
34 and 'LC_ALL' not in encoding.environ
35 and 'LC_MESSAGES' not in encoding.environ
35 and 'LC_MESSAGES' not in encoding.environ
36 and 'LANG' not in encoding.environ):
36 and 'LANG' not in encoding.environ):
37 # Try to detect UI language by "User Interface Language Management" API
37 # Try to detect UI language by "User Interface Language Management" API
38 # if no locale variables are set. Note that locale.getdefaultlocale()
38 # if no locale variables are set. Note that locale.getdefaultlocale()
39 # uses GetLocaleInfo(), which may be different from UI language.
39 # uses GetLocaleInfo(), which may be different from UI language.
40 # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx )
40 # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx )
41 try:
41 try:
42 import ctypes
42 import ctypes
43 langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
43 langid = ctypes.windll.kernel32.GetUserDefaultUILanguage()
44 _languages = [locale.windows_locale[langid]]
44 _languages = [locale.windows_locale[langid]]
45 except (ImportError, AttributeError, KeyError):
45 except (ImportError, AttributeError, KeyError):
46 # ctypes not found or unknown langid
46 # ctypes not found or unknown langid
47 pass
47 pass
48
48
49 _ugettext = None
49 _ugettext = None
50
50
51 def setdatapath(datapath):
51 def setdatapath(datapath):
52 datapath = pycompat.fsdecode(datapath)
52 datapath = pycompat.fsdecode(datapath)
53 localedir = os.path.join(datapath, pycompat.sysstr('locale'))
53 localedir = os.path.join(datapath, pycompat.sysstr('locale'))
54 t = gettextmod.translation('hg', localedir, _languages, fallback=True)
54 t = gettextmod.translation('hg', localedir, _languages, fallback=True)
55 global _ugettext
55 global _ugettext
56 try:
56 try:
57 _ugettext = t.ugettext
57 _ugettext = t.ugettext
58 except AttributeError:
58 except AttributeError:
59 _ugettext = t.gettext
59 _ugettext = t.gettext
60
60
61 _msgcache = {}
61 _msgcache = {}
62
62
63 def gettext(message):
63 def gettext(message):
64 """Translate message.
64 """Translate message.
65
65
66 The message is looked up in the catalog to get a Unicode string,
66 The message is looked up in the catalog to get a Unicode string,
67 which is encoded in the local encoding before being returned.
67 which is encoded in the local encoding before being returned.
68
68
69 Important: message is restricted to characters in the encoding
69 Important: message is restricted to characters in the encoding
70 given by sys.getdefaultencoding() which is most likely 'ascii'.
70 given by sys.getdefaultencoding() which is most likely 'ascii'.
71 """
71 """
72 # If message is None, t.ugettext will return u'None' as the
72 # If message is None, t.ugettext will return u'None' as the
73 # translation whereas our callers expect us to return None.
73 # translation whereas our callers expect us to return None.
74 if message is None or not _ugettext:
74 if message is None or not _ugettext:
75 return message
75 return message
76
76
77 if message not in _msgcache:
77 if message not in _msgcache:
78 if type(message) is unicode:
78 if type(message) is unicode:
79 # goofy unicode docstrings in test
79 # goofy unicode docstrings in test
80 paragraphs = message.split(u'\n\n')
80 paragraphs = message.split(u'\n\n')
81 else:
81 else:
82 paragraphs = [p.decode("ascii") for p in message.split('\n\n')]
82 paragraphs = [p.decode("ascii") for p in message.split('\n\n')]
83 # Be careful not to translate the empty string -- it holds the
83 # Be careful not to translate the empty string -- it holds the
84 # meta data of the .po file.
84 # meta data of the .po file.
85 u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
85 u = u'\n\n'.join([p and _ugettext(p) or u'' for p in paragraphs])
86 try:
86 try:
87 # encoding.tolocal cannot be used since it will first try to
87 # encoding.tolocal cannot be used since it will first try to
88 # decode the Unicode string. Calling u.decode(enc) really
88 # decode the Unicode string. Calling u.decode(enc) really
89 # means u.encode(sys.getdefaultencoding()).decode(enc). Since
89 # means u.encode(sys.getdefaultencoding()).decode(enc). Since
90 # the Python encoding defaults to 'ascii', this fails if the
90 # the Python encoding defaults to 'ascii', this fails if the
91 # translated string uses non-ASCII characters.
91 # translated string uses non-ASCII characters.
92 encodingstr = pycompat.sysstr(encoding.encoding)
92 encodingstr = pycompat.sysstr(encoding.encoding)
93 _msgcache[message] = u.encode(encodingstr, "replace")
93 _msgcache[message] = u.encode(encodingstr, "replace")
94 except LookupError:
94 except LookupError:
95 # An unknown encoding results in a LookupError.
95 # An unknown encoding results in a LookupError.
96 _msgcache[message] = message
96 _msgcache[message] = message
97 return _msgcache[message]
97 return _msgcache[message]
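# Editor's note: a hedged, self-contained sketch of the caching scheme in
# gettext() above -- translate paragraph by paragraph, never translate the
# empty string (it holds the .po metadata), and memoize the result; str.upper
# stands in for a real catalog lookup.
_cache = {}

def translate(message, ugettext):
    if message is None or ugettext is None:
        return message
    if message not in _cache:
        paragraphs = message.split('\n\n')
        _cache[message] = '\n\n'.join(p and ugettext(p) or '' for p in paragraphs)
    return _cache[message]

# translate('first\n\nsecond', str.upper) returns 'FIRST\n\nSECOND'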
98
98
99 def _plain():
99 def _plain():
100 if ('HGPLAIN' not in encoding.environ
100 if ('HGPLAIN' not in encoding.environ
101 and 'HGPLAINEXCEPT' not in encoding.environ):
101 and 'HGPLAINEXCEPT' not in encoding.environ):
102 return False
102 return False
103 exceptions = encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
103 exceptions = encoding.environ.get('HGPLAINEXCEPT', '').strip().split(',')
104 return 'i18n' not in exceptions
104 return 'i18n' not in exceptions
105
105
106 if _plain():
106 if _plain():
107 _ = lambda message: message
107 _ = lambda message: message
108 else:
108 else:
109 _ = gettext
109 _ = gettext
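# Editor's note: an illustrative restatement of _plain() above against a plain
# dict, to make the HGPLAIN/HGPLAINEXCEPT behaviour easy to try out.
def plain(environ):
    if 'HGPLAIN' not in environ and 'HGPLAINEXCEPT' not in environ:
        return False
    exceptions = environ.get('HGPLAINEXCEPT', '').strip().split(',')
    return 'i18n' not in exceptions

# plain({}) is False; plain({'HGPLAIN': '1'}) is True;
# plain({'HGPLAINEXCEPT': 'i18n'}) is False, so translation stays enabled.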
@@ -1,847 +1,847
1 # sslutil.py - SSL handling for mercurial
1 # sslutil.py - SSL handling for mercurial
2 #
2 #
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
4 # Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import hashlib
12 import hashlib
13 import os
13 import os
14 import re
14 import re
15 import ssl
15 import ssl
16 import sys
16 import sys
17
17
18 from .i18n import _
18 from .i18n import _
19 from . import (
19 from . import (
20 error,
20 error,
21 pycompat,
21 pycompat,
22 util,
22 util,
23 )
23 )
24
24
25 # Python 2.7.9+ overhauled the built-in SSL/TLS features of Python. It added
25 # Python 2.7.9+ overhauled the built-in SSL/TLS features of Python. It added
26 # support for TLS 1.1, TLS 1.2, SNI, system CA stores, etc. These features are
26 # support for TLS 1.1, TLS 1.2, SNI, system CA stores, etc. These features are
27 # all exposed via the "ssl" module.
27 # all exposed via the "ssl" module.
28 #
28 #
29 # Depending on the version of Python being used, SSL/TLS support is either
29 # Depending on the version of Python being used, SSL/TLS support is either
30 # modern/secure or legacy/insecure. Many operations in this module have
30 # modern/secure or legacy/insecure. Many operations in this module have
31 # separate code paths depending on support in Python.
31 # separate code paths depending on support in Python.
32
32
33 configprotocols = set([
33 configprotocols = set([
34 'tls1.0',
34 'tls1.0',
35 'tls1.1',
35 'tls1.1',
36 'tls1.2',
36 'tls1.2',
37 ])
37 ])
38
38
39 hassni = getattr(ssl, 'HAS_SNI', False)
39 hassni = getattr(ssl, 'HAS_SNI', False)
40
40
41 # TLS 1.1 and 1.2 may not be supported if the OpenSSL that Python is compiled
41 # TLS 1.1 and 1.2 may not be supported if the OpenSSL that Python is compiled
42 # against doesn't support them.
42 # against doesn't support them.
43 supportedprotocols = set(['tls1.0'])
43 supportedprotocols = set(['tls1.0'])
44 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
44 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
45 supportedprotocols.add('tls1.1')
45 supportedprotocols.add('tls1.1')
46 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
46 if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
47 supportedprotocols.add('tls1.2')
47 supportedprotocols.add('tls1.2')
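# Editor's note: a minimal sketch of the feature probing above, using plain
# hasattr()/getattr() in place of util.safehasattr(); results depend on the
# OpenSSL build Python was linked against.
import ssl

supported = set(['tls1.0'])
if hasattr(ssl, 'PROTOCOL_TLSv1_1'):
    supported.add('tls1.1')
if hasattr(ssl, 'PROTOCOL_TLSv1_2'):
    supported.add('tls1.2')
has_sni = getattr(ssl, 'HAS_SNI', False)
# on a modern build, supported == {'tls1.0', 'tls1.1', 'tls1.2'} and has_sni is True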
48
48
49 try:
49 try:
50 # ssl.SSLContext was added in 2.7.9 and presence indicates modern
50 # ssl.SSLContext was added in 2.7.9 and presence indicates modern
51 # SSL/TLS features are available.
51 # SSL/TLS features are available.
52 SSLContext = ssl.SSLContext
52 SSLContext = ssl.SSLContext
53 modernssl = True
53 modernssl = True
54 _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs')
54 _canloaddefaultcerts = util.safehasattr(SSLContext, 'load_default_certs')
55 except AttributeError:
55 except AttributeError:
56 modernssl = False
56 modernssl = False
57 _canloaddefaultcerts = False
57 _canloaddefaultcerts = False
58
58
59 # We implement SSLContext using the interface from the standard library.
59 # We implement SSLContext using the interface from the standard library.
60 class SSLContext(object):
60 class SSLContext(object):
61 # ssl.wrap_socket gained the "ciphers" named argument in 2.7.
61 # ssl.wrap_socket gained the "ciphers" named argument in 2.7.
62 _supportsciphers = sys.version_info >= (2, 7)
62 _supportsciphers = sys.version_info >= (2, 7)
63
63
64 def __init__(self, protocol):
64 def __init__(self, protocol):
65 # From the public interface of SSLContext
65 # From the public interface of SSLContext
66 self.protocol = protocol
66 self.protocol = protocol
67 self.check_hostname = False
67 self.check_hostname = False
68 self.options = 0
68 self.options = 0
69 self.verify_mode = ssl.CERT_NONE
69 self.verify_mode = ssl.CERT_NONE
70
70
71 # Used by our implementation.
71 # Used by our implementation.
72 self._certfile = None
72 self._certfile = None
73 self._keyfile = None
73 self._keyfile = None
74 self._certpassword = None
74 self._certpassword = None
75 self._cacerts = None
75 self._cacerts = None
76 self._ciphers = None
76 self._ciphers = None
77
77
78 def load_cert_chain(self, certfile, keyfile=None, password=None):
78 def load_cert_chain(self, certfile, keyfile=None, password=None):
79 self._certfile = certfile
79 self._certfile = certfile
80 self._keyfile = keyfile
80 self._keyfile = keyfile
81 self._certpassword = password
81 self._certpassword = password
82
82
83 def load_default_certs(self, purpose=None):
83 def load_default_certs(self, purpose=None):
84 pass
84 pass
85
85
86 def load_verify_locations(self, cafile=None, capath=None, cadata=None):
86 def load_verify_locations(self, cafile=None, capath=None, cadata=None):
87 if capath:
87 if capath:
88 raise error.Abort(_('capath not supported'))
88 raise error.Abort(_('capath not supported'))
89 if cadata:
89 if cadata:
90 raise error.Abort(_('cadata not supported'))
90 raise error.Abort(_('cadata not supported'))
91
91
92 self._cacerts = cafile
92 self._cacerts = cafile
93
93
94 def set_ciphers(self, ciphers):
94 def set_ciphers(self, ciphers):
95 if not self._supportsciphers:
95 if not self._supportsciphers:
96 raise error.Abort(_('setting ciphers in [hostsecurity] is not '
96 raise error.Abort(_('setting ciphers in [hostsecurity] is not '
97 'supported by this version of Python'),
97 'supported by this version of Python'),
98 hint=_('remove the config option or run '
98 hint=_('remove the config option or run '
99 'Mercurial with a modern Python '
99 'Mercurial with a modern Python '
100 'version (preferred)'))
100 'version (preferred)'))
101
101
102 self._ciphers = ciphers
102 self._ciphers = ciphers
103
103
104 def wrap_socket(self, socket, server_hostname=None, server_side=False):
104 def wrap_socket(self, socket, server_hostname=None, server_side=False):
105 # server_hostname is unique to SSLContext.wrap_socket and is used
105 # server_hostname is unique to SSLContext.wrap_socket and is used
106 # for SNI in that context. So there's nothing for us to do with it
106 # for SNI in that context. So there's nothing for us to do with it
107 # in this legacy code since we don't support SNI.
107 # in this legacy code since we don't support SNI.
108
108
109 args = {
109 args = {
110 'keyfile': self._keyfile,
110 'keyfile': self._keyfile,
111 'certfile': self._certfile,
111 'certfile': self._certfile,
112 'server_side': server_side,
112 'server_side': server_side,
113 'cert_reqs': self.verify_mode,
113 'cert_reqs': self.verify_mode,
114 'ssl_version': self.protocol,
114 'ssl_version': self.protocol,
115 'ca_certs': self._cacerts,
115 'ca_certs': self._cacerts,
116 }
116 }
117
117
118 if self._supportsciphers:
118 if self._supportsciphers:
119 args['ciphers'] = self._ciphers
119 args['ciphers'] = self._ciphers
120
120
121 return ssl.wrap_socket(socket, **args)
121 return ssl.wrap_socket(socket, **args)
122
122
123 def _hostsettings(ui, hostname):
123 def _hostsettings(ui, hostname):
124 """Obtain security settings for a hostname.
124 """Obtain security settings for a hostname.
125
125
126 Returns a dict of settings relevant to that hostname.
126 Returns a dict of settings relevant to that hostname.
127 """
127 """
128 s = {
128 s = {
129 # Whether we should attempt to load default/available CA certs
129 # Whether we should attempt to load default/available CA certs
130 # if an explicit ``cafile`` is not defined.
130 # if an explicit ``cafile`` is not defined.
131 'allowloaddefaultcerts': True,
131 'allowloaddefaultcerts': True,
132 # List of 2-tuple of (hash algorithm, hash).
132 # List of 2-tuple of (hash algorithm, hash).
133 'certfingerprints': [],
133 'certfingerprints': [],
134 # Path to file containing concatenated CA certs. Used by
134 # Path to file containing concatenated CA certs. Used by
135 # SSLContext.load_verify_locations().
        # SSLContext.load_verify_locations().
        'cafile': None,
        # Whether certificate verification should be disabled.
        'disablecertverification': False,
        # Whether the legacy [hostfingerprints] section has data for this host.
        'legacyfingerprint': False,
        # PROTOCOL_* constant to use for SSLContext.__init__.
        'protocol': None,
        # String representation of minimum protocol to be used for UI
        # presentation.
        'protocolui': None,
        # ssl.CERT_* constant used by SSLContext.verify_mode.
        'verifymode': None,
        # Defines extra ssl.OP* bitwise options to set.
        'ctxoptions': None,
        # OpenSSL Cipher List to use (instead of default).
        'ciphers': None,
    }

    # Allow minimum TLS protocol to be specified in the config.
    def validateprotocol(protocol, key):
        if protocol not in configprotocols:
            raise error.Abort(
                _('unsupported protocol from hostsecurity.%s: %s') %
                (key, protocol),
                hint=_('valid protocols: %s') %
                     ' '.join(sorted(configprotocols)))

    # We default to TLS 1.1+ where we can because TLS 1.0 has known
    # vulnerabilities (like BEAST and POODLE). We allow users to downgrade to
    # TLS 1.0+ via config options in case a legacy server is encountered.
    if 'tls1.1' in supportedprotocols:
        defaultprotocol = 'tls1.1'
    else:
        # Let people know they are borderline secure.
        # We don't document this config option because we want people to see
        # the bold warnings on the web site.
        # internal config: hostsecurity.disabletls10warning
        if not ui.configbool('hostsecurity', 'disabletls10warning'):
            ui.warn(_('warning: connecting to %s using legacy security '
                      'technology (TLS 1.0); see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'more info\n') % hostname)
        defaultprotocol = 'tls1.0'

    key = 'minimumprotocol'
    protocol = ui.config('hostsecurity', key, defaultprotocol)
    validateprotocol(protocol, key)

    key = '%s:minimumprotocol' % hostname
    protocol = ui.config('hostsecurity', key, protocol)
    validateprotocol(protocol, key)

    # If --insecure is used, we allow the use of TLS 1.0 despite config options.
    # We always print a "connection security to %s is disabled..." message when
    # --insecure is used. So no need to print anything more here.
    if ui.insecureconnections:
        protocol = 'tls1.0'

    s['protocol'], s['ctxoptions'], s['protocolui'] = protocolsettings(protocol)

    ciphers = ui.config('hostsecurity', 'ciphers')
    ciphers = ui.config('hostsecurity', '%s:ciphers' % hostname, ciphers)
    s['ciphers'] = ciphers

    # Look for fingerprints in [hostsecurity] section. Value is a list
    # of <alg>:<fingerprint> strings.
    fingerprints = ui.configlist('hostsecurity', '%s:fingerprints' % hostname,
                                 [])
    for fingerprint in fingerprints:
        if not (fingerprint.startswith(('sha1:', 'sha256:', 'sha512:'))):
            raise error.Abort(_('invalid fingerprint for %s: %s') % (
                                  hostname, fingerprint),
                              hint=_('must begin with "sha1:", "sha256:", '
                                     'or "sha512:"'))

        alg, fingerprint = fingerprint.split(':', 1)
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append((alg, fingerprint))

    # Fingerprints from [hostfingerprints] are always SHA-1.
    for fingerprint in ui.configlist('hostfingerprints', hostname, []):
        fingerprint = fingerprint.replace(':', '').lower()
        s['certfingerprints'].append(('sha1', fingerprint))
        s['legacyfingerprint'] = True

    # If a host cert fingerprint is defined, it is the only thing that
    # matters. No need to validate CA certs.
    if s['certfingerprints']:
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    # If --insecure is used, don't take CAs into consideration.
    elif ui.insecureconnections:
        s['disablecertverification'] = True
        s['verifymode'] = ssl.CERT_NONE
        s['allowloaddefaultcerts'] = False

    if ui.configbool('devel', 'disableloaddefaultcerts'):
        s['allowloaddefaultcerts'] = False

    # If both fingerprints and a per-host ca file are specified, issue a warning
    # because users should not be surprised about what security is or isn't
    # being performed.
    cafile = ui.config('hostsecurity', '%s:verifycertsfile' % hostname)
    if s['certfingerprints'] and cafile:
        ui.warn(_('(hostsecurity.%s:verifycertsfile ignored when host '
                  'fingerprints defined; using host fingerprints for '
                  'verification)\n') % hostname)

    # Try to hook up CA certificate validation unless something above
    # makes it not necessary.
    if s['verifymode'] is None:
        # Look at per-host ca file first.
        if cafile:
            cafile = util.expandpath(cafile)
            if not os.path.exists(cafile):
                raise error.Abort(_('path specified by %s does not exist: %s') %
                                  ('hostsecurity.%s:verifycertsfile' % hostname,
                                   cafile))
            s['cafile'] = cafile
        else:
            # Find global certificates file in config.
            cafile = ui.config('web', 'cacerts')

            if cafile:
                cafile = util.expandpath(cafile)
                if not os.path.exists(cafile):
                    raise error.Abort(_('could not find web.cacerts: %s') %
                                      cafile)
            elif s['allowloaddefaultcerts']:
                # CAs not defined in config. Try to find system bundles.
                cafile = _defaultcacerts(ui)
                if cafile:
                    ui.debug('using %s for CA file\n' % cafile)

            s['cafile'] = cafile

        # Require certificate validation if CA certs are being loaded and
        # verification hasn't been disabled above.
        if cafile or (_canloaddefaultcerts and s['allowloaddefaultcerts']):
            s['verifymode'] = ssl.CERT_REQUIRED
        else:
            # At this point we don't have a fingerprint, aren't being
            # explicitly insecure, and can't load CA certs. Connecting
            # is insecure. We allow the connection and abort during
            # validation (once we have the fingerprint to print to the
            # user).
            s['verifymode'] = ssl.CERT_NONE

    assert s['protocol'] is not None
    assert s['ctxoptions'] is not None
    assert s['verifymode'] is not None

    return s
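
For reference, the fingerprint normalization performed by the loop above can be exercised on its own. A minimal standalone sketch (the helper name parsefingerprints and the sample value are illustrative, not part of Mercurial's API) that mirrors the split/strip/lower steps:

def parsefingerprints(values):
    """Normalize '<alg>:<hex>' strings the way the _hostsettings() loop does.

    Returns (algorithm, lowercase hex without colons) tuples and raises
    ValueError for entries without a recognized algorithm prefix.
    """
    out = []
    for value in values:
        if not value.startswith(('sha1:', 'sha256:', 'sha512:')):
            raise ValueError('must begin with "sha1:", "sha256:", or "sha512:"')
        alg, fingerprint = value.split(':', 1)
        out.append((alg, fingerprint.replace(':', '').lower()))
    return out

if __name__ == '__main__':
    # truncated, made-up value just to show the normalization
    print(parsefingerprints(['sha256:AB:CD:EF:01:23']))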

def protocolsettings(protocol):
    """Resolve the protocol for a config value.

    Returns a 3-tuple of (protocol, options, ui value) where the first
    2 items are values used by SSLContext and the last is a string value
    of the ``minimumprotocol`` config option equivalent.
    """
    if protocol not in configprotocols:
        raise ValueError('protocol value not supported: %s' % protocol)

    # Despite its name, PROTOCOL_SSLv23 selects the highest protocol
    # that both ends support, including TLS protocols. On legacy stacks,
    # the highest it likely goes is TLS 1.0. On modern stacks, it can
    # support TLS 1.2.
    #
    # The PROTOCOL_TLSv* constants select a specific TLS version
    # only (as opposed to multiple versions). So the method for
    # supporting multiple TLS versions is to use PROTOCOL_SSLv23 and
    # disable protocols via SSLContext.options and OP_NO_* constants.
    # However, SSLContext.options doesn't work unless we have the
    # full/real SSLContext available to us.
    if supportedprotocols == set(['tls1.0']):
        if protocol != 'tls1.0':
            raise error.Abort(_('current Python does not support protocol '
                                'setting %s') % protocol,
                              hint=_('upgrade Python or disable setting since '
                                     'only TLS 1.0 is supported'))

        return ssl.PROTOCOL_TLSv1, 0, 'tls1.0'

    # WARNING: returned options don't work unless the modern ssl module
    # is available. Be careful when adding options here.

    # SSLv2 and SSLv3 are broken. We ban them outright.
    options = ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3

    if protocol == 'tls1.0':
        # Defaults above are to use TLS 1.0+
        pass
    elif protocol == 'tls1.1':
        options |= ssl.OP_NO_TLSv1
    elif protocol == 'tls1.2':
        options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1
    else:
        raise error.Abort(_('this should not happen'))

    # Prevent CRIME.
    # There is no guarantee this attribute is defined on the module.
    options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)

    return ssl.PROTOCOL_SSLv23, options, protocol
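
The OP_NO_* arithmetic above can be reproduced with the standard ssl module directly. A minimal sketch, assuming only that some of the OP_NO_* constants exist (getattr fallbacks cover the rest); the helper name minimumprotocoloptions is made up for illustration:

import ssl

# Map a minimum-version name to the flags that disable everything older,
# in the same spirit as protocolsettings().
_EXCLUDES = {
    'tls1.0': 0,
    'tls1.1': getattr(ssl, 'OP_NO_TLSv1', 0),
    'tls1.2': getattr(ssl, 'OP_NO_TLSv1', 0) | getattr(ssl, 'OP_NO_TLSv1_1', 0),
}

def minimumprotocoloptions(minimum):
    """Return SSLContext.options bits enforcing ``minimum`` (illustrative)."""
    if minimum not in _EXCLUDES:
        raise ValueError('unsupported minimum protocol: %s' % minimum)
    options = getattr(ssl, 'OP_NO_SSLv2', 0) | getattr(ssl, 'OP_NO_SSLv3', 0)
    options |= getattr(ssl, 'OP_NO_COMPRESSION', 0)  # CRIME mitigation
    return options | _EXCLUDES[minimum]

if __name__ == '__main__':
    print(hex(minimumprotocoloptions('tls1.1')))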

def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
    """Add SSL/TLS to a socket.

    This is a glorified wrapper for ``ssl.wrap_socket()``. It makes sane
    choices based on what security options are available.

    In addition to the arguments supported by ``ssl.wrap_socket``, we allow
    the following additional arguments:

    * serverhostname - The expected hostname of the remote server. If the
      server (and client) support SNI, this tells the server which certificate
      to use.
    """
    if not serverhostname:
        raise error.Abort(_('serverhostname argument is required'))

    settings = _hostsettings(ui, serverhostname)

    # We can't use ssl.create_default_context() because it calls
    # load_default_certs() unless CA arguments are passed to it. We want to
    # have explicit control over CA loading because implicitly loading
    # CAs may undermine the user's intent. For example, a user may define a CA
    # bundle with a specific CA cert removed. If the system/default CA bundle
    # is loaded and contains that removed CA, you've just undone the user's
    # choice.
    sslcontext = SSLContext(settings['protocol'])

    # This is a no-op unless using modern ssl.
    sslcontext.options |= settings['ctxoptions']

    # This still works on our fake SSLContext.
    sslcontext.verify_mode = settings['verifymode']

    if settings['ciphers']:
        try:
            sslcontext.set_ciphers(settings['ciphers'])
        except ssl.SSLError as e:
            raise error.Abort(_('could not set ciphers: %s') % e.args[0],
                              hint=_('change cipher string (%s) in config') %
                                   settings['ciphers'])

    if certfile is not None:
        def password():
            f = keyfile or certfile
            return ui.getpass(_('passphrase for %s: ') % f, '')
        sslcontext.load_cert_chain(certfile, keyfile, password)

    if settings['cafile'] is not None:
        try:
            sslcontext.load_verify_locations(cafile=settings['cafile'])
        except ssl.SSLError as e:
            if len(e.args) == 1: # pypy has different SSLError args
                msg = e.args[0]
            else:
                msg = e.args[1]
            raise error.Abort(_('error loading CA file %s: %s') % (
                                  settings['cafile'], msg),
                              hint=_('file is empty or malformed?'))
        caloaded = True
    elif settings['allowloaddefaultcerts']:
        # This is a no-op on old Python.
        sslcontext.load_default_certs()
        caloaded = True
    else:
        caloaded = False

    try:
        sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname)
    except ssl.SSLError as e:
        # If we're doing certificate verification and no CA certs are loaded,
        # that is almost certainly the reason why verification failed. Provide
        # a hint to the user.
        # Only modern ssl module exposes SSLContext.get_ca_certs() so we can
        # only show this warning if modern ssl is available.
        # The exception handler is here because of
        # https://bugs.python.org/issue20916.
        try:
            if (caloaded and settings['verifymode'] == ssl.CERT_REQUIRED and
                modernssl and not sslcontext.get_ca_certs()):
                ui.warn(_('(an attempt was made to load CA certificates but '
                          'none were loaded; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'error)\n'))
        except ssl.SSLError:
            pass
        # Try to print more helpful error messages for known failures.
        if util.safehasattr(e, 'reason'):
            # This error occurs when the client and server don't share a
            # common/supported SSL/TLS protocol. We've disabled SSLv2 and SSLv3
            # outright. Hopefully the reason for this error is that we require
            # TLS 1.1+ and the server only supports TLS 1.0. Whatever the
            # reason, try to emit an actionable warning.
            if e.reason == 'UNSUPPORTED_PROTOCOL':
                # We attempted TLS 1.0+.
                if settings['protocolui'] == 'tls1.0':
                    # We support more than just TLS 1.0+. If this happens,
                    # the likely scenario is either the client or the server
                    # is really old. (e.g. server doesn't support TLS 1.0+ or
                    # client doesn't support modern TLS versions introduced
                    # several years from when this comment was written).
                    if supportedprotocols != set(['tls1.0']):
                        ui.warn(_(
                            '(could not communicate with %s using security '
                            'protocols %s; if you are using a modern Mercurial '
                            'version, consider contacting the operator of this '
                            'server; see '
                            'https://mercurial-scm.org/wiki/SecureConnections '
                            'for more info)\n') % (
                                serverhostname,
                                ', '.join(sorted(supportedprotocols))))
                    else:
                        ui.warn(_(
                            '(could not communicate with %s using TLS 1.0; the '
                            'likely cause of this is the server no longer '
                            'supports TLS 1.0 because it has known security '
                            'vulnerabilities; see '
                            'https://mercurial-scm.org/wiki/SecureConnections '
                            'for more info)\n') % serverhostname)
                else:
                    # We attempted TLS 1.1+. We can only get here if the client
                    # supports the configured protocol. So the likely reason is
                    # the client wants better security than the server can
                    # offer.
                    ui.warn(_(
                        '(could not negotiate a common security protocol (%s+) '
                        'with %s; the likely cause is Mercurial is configured '
                        'to be more secure than the server can support)\n') % (
                        settings['protocolui'], serverhostname))
                    ui.warn(_('(consider contacting the operator of this '
                              'server and ask them to support modern TLS '
                              'protocol versions; or, set '
                              'hostsecurity.%s:minimumprotocol=tls1.0 to allow '
                              'use of legacy, less secure protocols when '
                              'communicating with this server)\n') %
                            serverhostname)
                    ui.warn(_(
                        '(see https://mercurial-scm.org/wiki/SecureConnections '
                        'for more info)\n'))
        raise

    # check if wrap_socket failed silently because socket had been
    # closed
    # - see http://bugs.python.org/issue13721
    if not sslsocket.cipher():
        raise error.Abort(_('ssl connection failed'))

    sslsocket._hgstate = {
        'caloaded': caloaded,
        'hostname': serverhostname,
        'settings': settings,
        'ui': ui,
    }

    return sslsocket
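
For comparison, this is roughly what the client-side wrapping boils down to when done with the standard library alone. A hedged sketch, not Mercurial's API: it skips the [hostsecurity] handling, fingerprint pinning, and friendlier error messages that wrapsocket() adds, and the helper name opentlsclient is invented here:

import socket
import ssl

def opentlsclient(hostname, port=443, cafile=None):
    """Open a verified TLS client connection (illustrative only)."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    ctx.options |= getattr(ssl, 'OP_NO_SSLv2', 0) | getattr(ssl, 'OP_NO_SSLv3', 0)
    ctx.verify_mode = ssl.CERT_REQUIRED
    if cafile:
        ctx.load_verify_locations(cafile=cafile)
    else:
        ctx.load_default_certs()
    sock = socket.create_connection((hostname, port))
    # server_hostname enables SNI so the server can pick the right cert
    tlssock = ctx.wrap_socket(sock, server_hostname=hostname)
    if not tlssock.cipher():   # issue13721: wrap can fail silently
        raise RuntimeError('ssl connection failed')
    return tlssock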

def wrapserversocket(sock, ui, certfile=None, keyfile=None, cafile=None,
                     requireclientcert=False):
    """Wrap a socket for use by servers.

    ``certfile`` and ``keyfile`` specify the files containing the certificate's
    public and private keys, respectively. Both keys can be defined in the same
    file via ``certfile`` (the private key must come first in the file).

    ``cafile`` defines the path to certificate authorities.

    ``requireclientcert`` specifies whether to require client certificates.

    Typically ``cafile`` is only defined if ``requireclientcert`` is true.
    """
    protocol, options, _protocolui = protocolsettings('tls1.0')

    # This config option is intended for use in tests only. It is a giant
    # footgun to kill security. Don't define it.
    exactprotocol = ui.config('devel', 'serverexactprotocol')
    if exactprotocol == 'tls1.0':
        protocol = ssl.PROTOCOL_TLSv1
    elif exactprotocol == 'tls1.1':
        if 'tls1.1' not in supportedprotocols:
            raise error.Abort(_('TLS 1.1 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_1
    elif exactprotocol == 'tls1.2':
        if 'tls1.2' not in supportedprotocols:
            raise error.Abort(_('TLS 1.2 not supported by this Python'))
        protocol = ssl.PROTOCOL_TLSv1_2
    elif exactprotocol:
        raise error.Abort(_('invalid value for serverexactprotocol: %s') %
                          exactprotocol)

    if modernssl:
        # We /could/ use create_default_context() here since it doesn't load
        # CAs when configured for client auth. However, it is hard-coded to
        # use ssl.PROTOCOL_SSLv23 which may not be appropriate here.
        sslcontext = SSLContext(protocol)
        sslcontext.options |= options

        # Improve forward secrecy.
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_DH_USE', 0)
        sslcontext.options |= getattr(ssl, 'OP_SINGLE_ECDH_USE', 0)

        # Use the list of more secure ciphers if found in the ssl module.
        if util.safehasattr(ssl, '_RESTRICTED_SERVER_CIPHERS'):
            sslcontext.options |= getattr(ssl, 'OP_CIPHER_SERVER_PREFERENCE', 0)
            sslcontext.set_ciphers(ssl._RESTRICTED_SERVER_CIPHERS)
    else:
        sslcontext = SSLContext(ssl.PROTOCOL_TLSv1)

    if requireclientcert:
        sslcontext.verify_mode = ssl.CERT_REQUIRED
    else:
        sslcontext.verify_mode = ssl.CERT_NONE

    if certfile or keyfile:
        sslcontext.load_cert_chain(certfile=certfile, keyfile=keyfile)

    if cafile:
        sslcontext.load_verify_locations(cafile=cafile)

    return sslcontext.wrap_socket(sock, server_side=True)

class wildcarderror(Exception):
    """Represents an error parsing wildcards in DNS name."""

def _dnsnamematch(dn, hostname, maxwildcards=1):
    """Match DNS names according RFC 6125 section 6.4.3.

    This code is effectively copied from CPython's ssl._dnsname_match.

    Returns a bool indicating whether the expected hostname matches
    the value in ``dn``.
    """
    pats = []
    if not dn:
        return False

    pieces = dn.split(r'.')
    leftmost = pieces[0]
    remainder = pieces[1:]
    wildcards = leftmost.count('*')
    if wildcards > maxwildcards:
        raise wildcarderror(
            _('too many wildcards in certificate DNS name: %s') % dn)

    # speed up common case w/o wildcards
    if not wildcards:
        return dn.lower() == hostname.lower()

    # RFC 6125, section 6.4.3, subitem 1.
    # The client SHOULD NOT attempt to match a presented identifier in which
    # the wildcard character comprises a label other than the left-most label.
    if leftmost == '*':
        # When '*' is a fragment by itself, it matches a non-empty dotless
        # fragment.
        pats.append('[^.]+')
    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
        # RFC 6125, section 6.4.3, subitem 3.
        # The client SHOULD NOT attempt to match a presented identifier
        # where the wildcard character is embedded within an A-label or
        # U-label of an internationalized domain name.
        pats.append(re.escape(leftmost))
    else:
        # Otherwise, '*' matches any dotless string, e.g. www*
        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))

    # add the remaining fragments, ignore any wildcards
    for frag in remainder:
        pats.append(re.escape(frag))

    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
    return pat.match(hostname) is not None
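
The wildcard-to-regex translation above is easy to try in isolation. A simplified sketch (the helper name wildcardpattern is illustrative; it ignores the IDN special case and allows a single wildcard):

import re

def wildcardpattern(dn):
    """Compile a cert DNS name with one leftmost '*' into a regex."""
    leftmost, _sep, remainder = dn.partition('.')
    if leftmost.count('*') > 1:
        raise ValueError('too many wildcards in certificate DNS name: %s' % dn)
    if leftmost == '*':
        pats = ['[^.]+']                      # bare '*' needs a non-empty label
    else:
        pats = [re.escape(leftmost).replace(r'\*', '[^.]*')]
    pats.extend(re.escape(frag) for frag in remainder.split('.') if frag)
    return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)

if __name__ == '__main__':
    pat = wildcardpattern('*.example.com')
    print(bool(pat.match('hg.example.com')))    # True
    print(bool(pat.match('a.b.example.com')))   # False: '*' spans one label only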

def _verifycert(cert, hostname):
    '''Verify that cert (in socket.getpeercert() format) matches hostname.
    CRLs is not handled.

    Returns error message if any problems are found and None on success.
    '''
    if not cert:
        return _('no certificate received')

    dnsnames = []
    san = cert.get('subjectAltName', [])
    for key, value in san:
        if key == 'DNS':
            try:
                if _dnsnamematch(value, hostname):
                    return
            except wildcarderror as e:
                return e.args[0]

            dnsnames.append(value)

    if not dnsnames:
        # The subject is only checked when there is no DNS in subjectAltName.
        for sub in cert.get('subject', []):
            for key, value in sub:
                # According to RFC 2818 the most specific Common Name must
                # be used.
                if key == 'commonName':
                    # 'subject' entries are unicode.
                    try:
                        value = value.encode('ascii')
                    except UnicodeEncodeError:
                        return _('IDN in certificate not supported')

                    try:
                        if _dnsnamematch(value, hostname):
                            return
                    except wildcarderror as e:
                        return e.args[0]

                    dnsnames.append(value)

    if len(dnsnames) > 1:
        return _('certificate is for %s') % ', '.join(dnsnames)
    elif len(dnsnames) == 1:
        return _('certificate is for %s') % dnsnames[0]
    else:
        return _('no commonName or subjectAltName found in certificate')
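
For readers unfamiliar with the dict shape that ssl.SSLSocket.getpeercert() returns, here is a rough sketch of the structure _verifycert() walks; the sample certificate values are made up and the helper dnsnamesof is illustrative only:

# Parsed-certificate dict as returned by getpeercert() (values invented):
samplecert = {
    'subject': ((('commonName', u'hg.example.com'),),),
    'subjectAltName': (('DNS', 'hg.example.com'), ('DNS', '*.example.com')),
}

def dnsnamesof(cert):
    """Collect the names _verifycert() would consider, SAN entries first."""
    names = [v for k, v in cert.get('subjectAltName', []) if k == 'DNS']
    if not names:
        for rdn in cert.get('subject', []):
            names.extend(v for k, v in rdn if k == 'commonName')
    return names

if __name__ == '__main__':
    print(dnsnamesof(samplecert))   # ['hg.example.com', '*.example.com']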

def _plainapplepython():
    """return true if this seems to be a pure Apple Python that
    * is unfrozen and presumably has the whole mercurial module in the file
      system
    * presumably is an Apple Python that uses Apple OpenSSL which has patches
      for using system certificate store CAs in addition to the provided
      cacerts file
    """
    if (pycompat.sysplatform != 'darwin' or
-           util.mainfrozen() or not sys.executable):
+           util.mainfrozen() or not pycompat.sysexecutable):
        return False
-   exe = os.path.realpath(sys.executable).lower()
+   exe = os.path.realpath(pycompat.sysexecutable).lower()
    return (exe.startswith('/usr/bin/python') or
            exe.startswith('/system/library/frameworks/python.framework/'))

_systemcacertpaths = [
    # RHEL, CentOS, and Fedora
    '/etc/pki/tls/certs/ca-bundle.trust.crt',
    # Debian, Ubuntu, Gentoo
    '/etc/ssl/certs/ca-certificates.crt',
]

def _defaultcacerts(ui):
    """return path to default CA certificates or None.

    It is assumed this function is called when the returned certificates
    file will actually be used to validate connections. Therefore this
    function may print warnings or debug messages assuming this usage.

    We don't print a message when the Python is able to load default
    CA certs because this scenario is detected at socket connect time.
    """
    # The "certifi" Python package provides certificates. If it is installed
    # and usable, assume the user intends it to be used and use it.
    try:
        import certifi
        certs = certifi.where()
        if os.path.exists(certs):
            ui.debug('using ca certificates from certifi\n')
            return certs
    except (ImportError, AttributeError):
        pass

    # On Windows, only the modern ssl module is capable of loading the system
    # CA certificates. If we're not capable of doing that, emit a warning
    # because we'll get a certificate verification error later and the lack
    # of loaded CA certificates will be the reason why.
    # Assertion: this code is only called if certificates are being verified.
    if pycompat.osname == 'nt':
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load Windows CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))

        return None

    # Apple's OpenSSL has patches that allow a specially constructed certificate
    # to load the system CA store. If we're running on Apple Python, use this
    # trick.
    if _plainapplepython():
        dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
        if os.path.exists(dummycert):
            return dummycert

    # The Apple OpenSSL trick isn't available to us. If Python isn't able to
    # load system certs, we're out of luck.
    if pycompat.sysplatform == 'darwin':
        # FUTURE Consider looking for Homebrew or MacPorts installed certs
        # files. Also consider exporting the keychain certs to a file during
        # Mercurial install.
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))
        return None

    # / is writable on Windows. Out of an abundance of caution make sure
    # we're not on Windows because paths from _systemcacerts could be installed
    # by non-admin users.
    assert pycompat.osname != 'nt'

    # Try to find CA certificates in well-known locations. We print a warning
    # when using a found file because we don't want too much silent magic
    # for security settings. The expectation is that proper Mercurial
    # installs will have the CA certs path defined at install time and the
    # installer/packager will make an appropriate decision on the user's
    # behalf. We only get here and perform this setting as a feature of
    # last resort.
    if not _canloaddefaultcerts:
        for path in _systemcacertpaths:
            if os.path.isfile(path):
                ui.warn(_('(using CA certificates from %s; if you see this '
                          'message, your Mercurial install is not properly '
                          'configured; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'message)\n') % path)
                return path

        ui.warn(_('(unable to load CA certificates; see '
                  'https://mercurial-scm.org/wiki/SecureConnections for '
                  'how to configure Mercurial to avoid this message)\n'))

    return None
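
The last-resort probing of well-known bundle paths can be sketched on its own. The list below mirrors _systemcacertpaths in spirit, but the exact paths are distribution-specific and the helper name findsystemcabundle is invented for illustration:

import os

_cacandidates = [
    '/etc/pki/tls/certs/ca-bundle.trust.crt',   # RHEL, CentOS, Fedora
    '/etc/ssl/certs/ca-certificates.crt',       # Debian, Ubuntu, Gentoo
]

def findsystemcabundle():
    """Return the first readable CA bundle path, or None."""
    for path in _cacandidates:
        if os.path.isfile(path):
            return path
    return None

if __name__ == '__main__':
    print(findsystemcabundle())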

def validatesocket(sock):
    """Validate a socket meets security requirements.

    The passed socket must have been created with ``wrapsocket()``.
    """
    host = sock._hgstate['hostname']
    ui = sock._hgstate['ui']
    settings = sock._hgstate['settings']

    try:
        peercert = sock.getpeercert(True)
        peercert2 = sock.getpeercert()
    except AttributeError:
        raise error.Abort(_('%s ssl connection error') % host)

    if not peercert:
        raise error.Abort(_('%s certificate error: '
                            'no certificate received') % host)

    if settings['disablecertverification']:
        # We don't print the certificate fingerprint because it shouldn't
        # be necessary: if the user requested certificate verification be
        # disabled, they presumably already saw a message about the inability
        # to verify the certificate and this message would have printed the
        # fingerprint. So printing the fingerprint here adds little to no
        # value.
        ui.warn(_('warning: connection security to %s is disabled per current '
                  'settings; communication is susceptible to eavesdropping '
                  'and tampering\n') % host)
        return

    # If a certificate fingerprint is pinned, use it and only it to
    # validate the remote cert.
    peerfingerprints = {
        'sha1': hashlib.sha1(peercert).hexdigest(),
        'sha256': hashlib.sha256(peercert).hexdigest(),
        'sha512': hashlib.sha512(peercert).hexdigest(),
    }

    def fmtfingerprint(s):
        return ':'.join([s[x:x + 2] for x in range(0, len(s), 2)])

    nicefingerprint = 'sha256:%s' % fmtfingerprint(peerfingerprints['sha256'])

    if settings['certfingerprints']:
        for hash, fingerprint in settings['certfingerprints']:
            if peerfingerprints[hash].lower() == fingerprint:
                ui.debug('%s certificate matched fingerprint %s:%s\n' %
                         (host, hash, fmtfingerprint(fingerprint)))
                return

        # Pinned fingerprint didn't match. This is a fatal error.
        if settings['legacyfingerprint']:
            section = 'hostfingerprint'
            nice = fmtfingerprint(peerfingerprints['sha1'])
        else:
            section = 'hostsecurity'
            nice = '%s:%s' % (hash, fmtfingerprint(peerfingerprints[hash]))
        raise error.Abort(_('certificate for %s has unexpected '
                            'fingerprint %s') % (host, nice),
                          hint=_('check %s configuration') % section)

    # Security is enabled but no CAs are loaded. We can't establish trust
    # for the cert so abort.
    if not sock._hgstate['caloaded']:
        raise error.Abort(
            _('unable to verify security of %s (no loaded CA certificates); '
              'refusing to connect') % host,
            hint=_('see https://mercurial-scm.org/wiki/SecureConnections for '
                   'how to configure Mercurial to avoid this error or set '
                   'hostsecurity.%s:fingerprints=%s to trust this server') %
                 (host, nicefingerprint))

    msg = _verifycert(peercert2, host)
    if msg:
        raise error.Abort(_('%s certificate error: %s') % (host, msg),
                          hint=_('set hostsecurity.%s:certfingerprints=%s '
                                 'config setting or use --insecure to connect '
                                 'insecurely') %
                               (host, nicefingerprint))
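
The fingerprint strings that the hints above tell users to pin can be derived from the DER certificate with hashlib alone, using the same colon-separated formatting as fmtfingerprint() in validatesocket(). A small sketch (the input bytes are a stand-in, not a real certificate, and pinnablefingerprint is an illustrative name):

import hashlib

def fmtfingerprint(hexdigest):
    """Format a hex digest as colon-separated byte pairs."""
    return ':'.join(hexdigest[i:i + 2] for i in range(0, len(hexdigest), 2))

def pinnablefingerprint(dercert):
    """Return the 'sha256:...' string a user could pin in [hostsecurity]."""
    return 'sha256:%s' % fmtfingerprint(hashlib.sha256(dercert).hexdigest())

if __name__ == '__main__':
    print(pinnablefingerprint(b'not-a-real-der-certificate'))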
@@ -1,3246 +1,3246 @@
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from __future__ import absolute_import

import bz2
import calendar
import collections
import datetime
import errno
import gc
import hashlib
import imp
import os
import platform as pyplatform
import re as remod
import shutil
import signal
import socket
import stat
import string
import subprocess
import sys
import tempfile
import textwrap
import time
import traceback
import zlib

from . import (
    encoding,
    error,
    i18n,
    osutil,
    parsers,
    pycompat,
)

empty = pycompat.empty
httplib = pycompat.httplib
httpserver = pycompat.httpserver
pickle = pycompat.pickle
queue = pycompat.queue
socketserver = pycompat.socketserver
stderr = pycompat.stderr
stdin = pycompat.stdin
stdout = pycompat.stdout
stringio = pycompat.stringio
urlerr = pycompat.urlerr
urlparse = pycompat.urlparse
urlreq = pycompat.urlreq
xmlrpclib = pycompat.xmlrpclib

if pycompat.osname == 'nt':
    from . import windows as platform
    stdout = platform.winstdout(pycompat.stdout)
else:
    from . import posix as platform

_ = i18n._

bindunixsocket = platform.bindunixsocket
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
unlinkpath = platform.unlinkpath
username = platform.username
128
128
129 # Python compatibility
129 # Python compatibility
130
130
131 _notset = object()
131 _notset = object()
132
132
133 # disable Python's problematic floating point timestamps (issue4836)
133 # disable Python's problematic floating point timestamps (issue4836)
134 # (Python hypocritically says you shouldn't change this behavior in
134 # (Python hypocritically says you shouldn't change this behavior in
135 # libraries, and sure enough Mercurial is not a library.)
135 # libraries, and sure enough Mercurial is not a library.)
136 os.stat_float_times(False)
136 os.stat_float_times(False)
137
137
138 def safehasattr(thing, attr):
138 def safehasattr(thing, attr):
139 return getattr(thing, attr, _notset) is not _notset
139 return getattr(thing, attr, _notset) is not _notset
140
140
141 DIGESTS = {
141 DIGESTS = {
142 'md5': hashlib.md5,
142 'md5': hashlib.md5,
143 'sha1': hashlib.sha1,
143 'sha1': hashlib.sha1,
144 'sha512': hashlib.sha512,
144 'sha512': hashlib.sha512,
145 }
145 }
146 # List of digest types from strongest to weakest
146 # List of digest types from strongest to weakest
147 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
147 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
148
148
149 for k in DIGESTS_BY_STRENGTH:
149 for k in DIGESTS_BY_STRENGTH:
150 assert k in DIGESTS
150 assert k in DIGESTS
151
151
152 class digester(object):
152 class digester(object):
153 """helper to compute digests.
153 """helper to compute digests.
154
154
155 This helper can be used to compute one or more digests given their name.
155 This helper can be used to compute one or more digests given their name.
156
156
157 >>> d = digester(['md5', 'sha1'])
157 >>> d = digester(['md5', 'sha1'])
158 >>> d.update('foo')
158 >>> d.update('foo')
159 >>> [k for k in sorted(d)]
159 >>> [k for k in sorted(d)]
160 ['md5', 'sha1']
160 ['md5', 'sha1']
161 >>> d['md5']
161 >>> d['md5']
162 'acbd18db4cc2f85cedef654fccc4a4d8'
162 'acbd18db4cc2f85cedef654fccc4a4d8'
163 >>> d['sha1']
163 >>> d['sha1']
164 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
164 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
165 >>> digester.preferred(['md5', 'sha1'])
165 >>> digester.preferred(['md5', 'sha1'])
166 'sha1'
166 'sha1'
167 """
167 """
168
168
169 def __init__(self, digests, s=''):
169 def __init__(self, digests, s=''):
170 self._hashes = {}
170 self._hashes = {}
171 for k in digests:
171 for k in digests:
172 if k not in DIGESTS:
172 if k not in DIGESTS:
173 raise Abort(_('unknown digest type: %s') % k)
173 raise Abort(_('unknown digest type: %s') % k)
174 self._hashes[k] = DIGESTS[k]()
174 self._hashes[k] = DIGESTS[k]()
175 if s:
175 if s:
176 self.update(s)
176 self.update(s)
177
177
178 def update(self, data):
178 def update(self, data):
179 for h in self._hashes.values():
179 for h in self._hashes.values():
180 h.update(data)
180 h.update(data)
181
181
182 def __getitem__(self, key):
182 def __getitem__(self, key):
183 if key not in DIGESTS:
183 if key not in DIGESTS:
184 raise Abort(_('unknown digest type: %s') % key)
184 raise Abort(_('unknown digest type: %s') % key)
185 return self._hashes[key].hexdigest()
185 return self._hashes[key].hexdigest()
186
186
187 def __iter__(self):
187 def __iter__(self):
188 return iter(self._hashes)
188 return iter(self._hashes)
189
189
190 @staticmethod
190 @staticmethod
191 def preferred(supported):
191 def preferred(supported):
192 """returns the strongest digest type in both supported and DIGESTS."""
192 """returns the strongest digest type in both supported and DIGESTS."""
193
193
194 for k in DIGESTS_BY_STRENGTH:
194 for k in DIGESTS_BY_STRENGTH:
195 if k in supported:
195 if k in supported:
196 return k
196 return k
197 return None
197 return None
198
198
199 class digestchecker(object):
199 class digestchecker(object):
200 """file handle wrapper that additionally checks content against a given
200 """file handle wrapper that additionally checks content against a given
201 size and digests.
201 size and digests.
202
202
203 d = digestchecker(fh, size, {'md5': '...'})
203 d = digestchecker(fh, size, {'md5': '...'})
204
204
205 When multiple digests are given, all of them are validated.
205 When multiple digests are given, all of them are validated.
206 """
206 """
207
207
208 def __init__(self, fh, size, digests):
208 def __init__(self, fh, size, digests):
209 self._fh = fh
209 self._fh = fh
210 self._size = size
210 self._size = size
211 self._got = 0
211 self._got = 0
212 self._digests = dict(digests)
212 self._digests = dict(digests)
213 self._digester = digester(self._digests.keys())
213 self._digester = digester(self._digests.keys())
214
214
215 def read(self, length=-1):
215 def read(self, length=-1):
216 content = self._fh.read(length)
216 content = self._fh.read(length)
217 self._digester.update(content)
217 self._digester.update(content)
218 self._got += len(content)
218 self._got += len(content)
219 return content
219 return content
220
220
221 def validate(self):
221 def validate(self):
222 if self._size != self._got:
222 if self._size != self._got:
223 raise Abort(_('size mismatch: expected %d, got %d') %
223 raise Abort(_('size mismatch: expected %d, got %d') %
224 (self._size, self._got))
224 (self._size, self._got))
225 for k, v in self._digests.items():
225 for k, v in self._digests.items():
226 if v != self._digester[k]:
226 if v != self._digester[k]:
227 # i18n: first parameter is a digest name
227 # i18n: first parameter is a digest name
228 raise Abort(_('%s mismatch: expected %s, got %s') %
228 raise Abort(_('%s mismatch: expected %s, got %s') %
229 (k, v, self._digester[k]))
229 (k, v, self._digester[k]))
230
230
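As an illustration of how these two helpers fit together (a minimal sketch; it assumes this module is importable as mercurial.util and uses an in-memory file object in place of a real download), the checker is read to exhaustion and then validated:

    import hashlib
    import io

    from mercurial import util

    payload = b'some bundle content'
    expected = {'sha1': hashlib.sha1(payload).hexdigest()}

    # Wrap any readable file-like object; verification is deferred until the
    # caller has consumed everything it intends to read.
    fh = util.digestchecker(io.BytesIO(payload), len(payload), expected)
    while fh.read(4096):
        pass
    fh.validate()   # raises util.Abort on a size or digest mismatch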
231 try:
231 try:
232 buffer = buffer
232 buffer = buffer
233 except NameError:
233 except NameError:
234 if not pycompat.ispy3:
234 if not pycompat.ispy3:
235 def buffer(sliceable, offset=0):
235 def buffer(sliceable, offset=0):
236 return sliceable[offset:]
236 return sliceable[offset:]
237 else:
237 else:
238 def buffer(sliceable, offset=0):
238 def buffer(sliceable, offset=0):
239 return memoryview(sliceable)[offset:]
239 return memoryview(sliceable)[offset:]
240
240
241 closefds = pycompat.osname == 'posix'
241 closefds = pycompat.osname == 'posix'
242
242
243 _chunksize = 4096
243 _chunksize = 4096
244
244
245 class bufferedinputpipe(object):
245 class bufferedinputpipe(object):
246 """a manually buffered input pipe
246 """a manually buffered input pipe
247
247
248 Python will not let us use buffered IO and lazy reading with 'polling' at
248 Python will not let us use buffered IO and lazy reading with 'polling' at
249 the same time. We cannot probe the buffer state and select will not detect
249 the same time. We cannot probe the buffer state and select will not detect
250 that data are ready to read if they are already buffered.
250 that data are ready to read if they are already buffered.
251
251
252 This class lets us work around that by implementing its own buffering
252 This class lets us work around that by implementing its own buffering
253 (allowing efficient readline) while offering a way to know if the buffer is
253 (allowing efficient readline) while offering a way to know if the buffer is
254 empty from the outside (allowing the buffer to collaborate with polling).
254 empty from the outside (allowing the buffer to collaborate with polling).
255
255
256 This class lives in the 'util' module because it makes use of the 'os'
256 This class lives in the 'util' module because it makes use of the 'os'
257 module from the python stdlib.
257 module from the python stdlib.
258 """
258 """
259
259
260 def __init__(self, input):
260 def __init__(self, input):
261 self._input = input
261 self._input = input
262 self._buffer = []
262 self._buffer = []
263 self._eof = False
263 self._eof = False
264 self._lenbuf = 0
264 self._lenbuf = 0
265
265
266 @property
266 @property
267 def hasbuffer(self):
267 def hasbuffer(self):
268 """True is any data is currently buffered
268 """True is any data is currently buffered
269
269
270 This will be used externally as a pre-step for polling IO. If there is
270 This will be used externally as a pre-step for polling IO. If there is
271 already data then no polling should be set in place."""
271 already data then no polling should be set in place."""
272 return bool(self._buffer)
272 return bool(self._buffer)
273
273
274 @property
274 @property
275 def closed(self):
275 def closed(self):
276 return self._input.closed
276 return self._input.closed
277
277
278 def fileno(self):
278 def fileno(self):
279 return self._input.fileno()
279 return self._input.fileno()
280
280
281 def close(self):
281 def close(self):
282 return self._input.close()
282 return self._input.close()
283
283
284 def read(self, size):
284 def read(self, size):
285 while (not self._eof) and (self._lenbuf < size):
285 while (not self._eof) and (self._lenbuf < size):
286 self._fillbuffer()
286 self._fillbuffer()
287 return self._frombuffer(size)
287 return self._frombuffer(size)
288
288
289 def readline(self, *args, **kwargs):
289 def readline(self, *args, **kwargs):
290 if 1 < len(self._buffer):
290 if 1 < len(self._buffer):
291 # this should not happen because both read and readline end with a
291 # this should not happen because both read and readline end with a
292 # _frombuffer call that collapses it.
292 # _frombuffer call that collapses it.
293 self._buffer = [''.join(self._buffer)]
293 self._buffer = [''.join(self._buffer)]
294 self._lenbuf = len(self._buffer[0])
294 self._lenbuf = len(self._buffer[0])
295 lfi = -1
295 lfi = -1
296 if self._buffer:
296 if self._buffer:
297 lfi = self._buffer[-1].find('\n')
297 lfi = self._buffer[-1].find('\n')
298 while (not self._eof) and lfi < 0:
298 while (not self._eof) and lfi < 0:
299 self._fillbuffer()
299 self._fillbuffer()
300 if self._buffer:
300 if self._buffer:
301 lfi = self._buffer[-1].find('\n')
301 lfi = self._buffer[-1].find('\n')
302 size = lfi + 1
302 size = lfi + 1
303 if lfi < 0: # end of file
303 if lfi < 0: # end of file
304 size = self._lenbuf
304 size = self._lenbuf
305 elif 1 < len(self._buffer):
305 elif 1 < len(self._buffer):
306 # we need to take previous chunks into account
306 # we need to take previous chunks into account
307 size += self._lenbuf - len(self._buffer[-1])
307 size += self._lenbuf - len(self._buffer[-1])
308 return self._frombuffer(size)
308 return self._frombuffer(size)
309
309
310 def _frombuffer(self, size):
310 def _frombuffer(self, size):
311 """return at most 'size' data from the buffer
311 """return at most 'size' data from the buffer
312
312
313 The data are removed from the buffer."""
313 The data are removed from the buffer."""
314 if size == 0 or not self._buffer:
314 if size == 0 or not self._buffer:
315 return ''
315 return ''
316 buf = self._buffer[0]
316 buf = self._buffer[0]
317 if 1 < len(self._buffer):
317 if 1 < len(self._buffer):
318 buf = ''.join(self._buffer)
318 buf = ''.join(self._buffer)
319
319
320 data = buf[:size]
320 data = buf[:size]
321 buf = buf[len(data):]
321 buf = buf[len(data):]
322 if buf:
322 if buf:
323 self._buffer = [buf]
323 self._buffer = [buf]
324 self._lenbuf = len(buf)
324 self._lenbuf = len(buf)
325 else:
325 else:
326 self._buffer = []
326 self._buffer = []
327 self._lenbuf = 0
327 self._lenbuf = 0
328 return data
328 return data
329
329
330 def _fillbuffer(self):
330 def _fillbuffer(self):
331 """read data to the buffer"""
331 """read data to the buffer"""
332 data = os.read(self._input.fileno(), _chunksize)
332 data = os.read(self._input.fileno(), _chunksize)
333 if not data:
333 if not data:
334 self._eof = True
334 self._eof = True
335 else:
335 else:
336 self._lenbuf += len(data)
336 self._lenbuf += len(data)
337 self._buffer.append(data)
337 self._buffer.append(data)
338
338
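A rough sketch of the intended usage pattern (Python 2 string semantics assumed; an OS pipe stands in for a real command-server channel, and the module is assumed importable as mercurial.util): consult hasbuffer before blocking in select, since already-buffered data would otherwise go unnoticed by the poll:

    import os
    import select

    from mercurial import util

    rfd, wfd = os.pipe()
    os.write(wfd, b'first line\nsecond line\n')

    pipe = util.bufferedinputpipe(os.fdopen(rfd, 'rb'))
    first = pipe.readline()            # 'first line\n'; may buffer the rest too

    # Only poll the fd when the Python-level buffer is empty, otherwise
    # select() could block even though data is already available.
    if pipe.hasbuffer or select.select([pipe], [], [], 0)[0]:
        second = pipe.readline()       # 'second line\n'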
339 def popen2(cmd, env=None, newlines=False):
339 def popen2(cmd, env=None, newlines=False):
340 # Setting bufsize to -1 lets the system decide the buffer size.
340 # Setting bufsize to -1 lets the system decide the buffer size.
341 # The default for bufsize is 0, meaning unbuffered. This leads to
341 # The default for bufsize is 0, meaning unbuffered. This leads to
342 # poor performance on Mac OS X: http://bugs.python.org/issue4194
342 # poor performance on Mac OS X: http://bugs.python.org/issue4194
343 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
343 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
344 close_fds=closefds,
344 close_fds=closefds,
345 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
345 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
346 universal_newlines=newlines,
346 universal_newlines=newlines,
347 env=env)
347 env=env)
348 return p.stdin, p.stdout
348 return p.stdin, p.stdout
349
349
350 def popen3(cmd, env=None, newlines=False):
350 def popen3(cmd, env=None, newlines=False):
351 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
351 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
352 return stdin, stdout, stderr
352 return stdin, stdout, stderr
353
353
354 def popen4(cmd, env=None, newlines=False, bufsize=-1):
354 def popen4(cmd, env=None, newlines=False, bufsize=-1):
355 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
355 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
356 close_fds=closefds,
356 close_fds=closefds,
357 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
357 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
358 stderr=subprocess.PIPE,
358 stderr=subprocess.PIPE,
359 universal_newlines=newlines,
359 universal_newlines=newlines,
360 env=env)
360 env=env)
361 return p.stdin, p.stdout, p.stderr, p
361 return p.stdin, p.stdout, p.stderr, p
362
362
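A small sketch of the difference between these helpers (assumes a POSIX shell with cat on PATH): popen2 exposes only the pipes, while popen4 also hands back the Popen object so the caller can wait and check the exit status:

    from mercurial import util

    stdin, stdout = util.popen2('cat')
    stdin.write(b'echoed through cat\n')
    stdin.close()                      # EOF lets cat terminate
    print(stdout.read())

    stdin, stdout, stderr, proc = util.popen4('cat')
    stdin.close()
    proc.wait()
    print(proc.returncode)             # 0 on success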
363 def version():
363 def version():
364 """Return version information if available."""
364 """Return version information if available."""
365 try:
365 try:
366 from . import __version__
366 from . import __version__
367 return __version__.version
367 return __version__.version
368 except ImportError:
368 except ImportError:
369 return 'unknown'
369 return 'unknown'
370
370
371 def versiontuple(v=None, n=4):
371 def versiontuple(v=None, n=4):
372 """Parses a Mercurial version string into an N-tuple.
372 """Parses a Mercurial version string into an N-tuple.
373
373
374 The version string to be parsed is specified with the ``v`` argument.
374 The version string to be parsed is specified with the ``v`` argument.
375 If it isn't defined, the current Mercurial version string will be parsed.
375 If it isn't defined, the current Mercurial version string will be parsed.
376
376
377 ``n`` can be 2, 3, or 4. Here is how some version strings map to
377 ``n`` can be 2, 3, or 4. Here is how some version strings map to
378 returned values:
378 returned values:
379
379
380 >>> v = '3.6.1+190-df9b73d2d444'
380 >>> v = '3.6.1+190-df9b73d2d444'
381 >>> versiontuple(v, 2)
381 >>> versiontuple(v, 2)
382 (3, 6)
382 (3, 6)
383 >>> versiontuple(v, 3)
383 >>> versiontuple(v, 3)
384 (3, 6, 1)
384 (3, 6, 1)
385 >>> versiontuple(v, 4)
385 >>> versiontuple(v, 4)
386 (3, 6, 1, '190-df9b73d2d444')
386 (3, 6, 1, '190-df9b73d2d444')
387
387
388 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
388 >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
389 (3, 6, 1, '190-df9b73d2d444+20151118')
389 (3, 6, 1, '190-df9b73d2d444+20151118')
390
390
391 >>> v = '3.6'
391 >>> v = '3.6'
392 >>> versiontuple(v, 2)
392 >>> versiontuple(v, 2)
393 (3, 6)
393 (3, 6)
394 >>> versiontuple(v, 3)
394 >>> versiontuple(v, 3)
395 (3, 6, None)
395 (3, 6, None)
396 >>> versiontuple(v, 4)
396 >>> versiontuple(v, 4)
397 (3, 6, None, None)
397 (3, 6, None, None)
398
398
399 >>> v = '3.9-rc'
399 >>> v = '3.9-rc'
400 >>> versiontuple(v, 2)
400 >>> versiontuple(v, 2)
401 (3, 9)
401 (3, 9)
402 >>> versiontuple(v, 3)
402 >>> versiontuple(v, 3)
403 (3, 9, None)
403 (3, 9, None)
404 >>> versiontuple(v, 4)
404 >>> versiontuple(v, 4)
405 (3, 9, None, 'rc')
405 (3, 9, None, 'rc')
406
406
407 >>> v = '3.9-rc+2-02a8fea4289b'
407 >>> v = '3.9-rc+2-02a8fea4289b'
408 >>> versiontuple(v, 2)
408 >>> versiontuple(v, 2)
409 (3, 9)
409 (3, 9)
410 >>> versiontuple(v, 3)
410 >>> versiontuple(v, 3)
411 (3, 9, None)
411 (3, 9, None)
412 >>> versiontuple(v, 4)
412 >>> versiontuple(v, 4)
413 (3, 9, None, 'rc+2-02a8fea4289b')
413 (3, 9, None, 'rc+2-02a8fea4289b')
414 """
414 """
415 if not v:
415 if not v:
416 v = version()
416 v = version()
417 parts = remod.split(r'[\+-]', v, 1)
417 parts = remod.split(r'[\+-]', v, 1)
418 if len(parts) == 1:
418 if len(parts) == 1:
419 vparts, extra = parts[0], None
419 vparts, extra = parts[0], None
420 else:
420 else:
421 vparts, extra = parts
421 vparts, extra = parts
422
422
423 vints = []
423 vints = []
424 for i in vparts.split('.'):
424 for i in vparts.split('.'):
425 try:
425 try:
426 vints.append(int(i))
426 vints.append(int(i))
427 except ValueError:
427 except ValueError:
428 break
428 break
429 # (3, 6) -> (3, 6, None)
429 # (3, 6) -> (3, 6, None)
430 while len(vints) < 3:
430 while len(vints) < 3:
431 vints.append(None)
431 vints.append(None)
432
432
433 if n == 2:
433 if n == 2:
434 return (vints[0], vints[1])
434 return (vints[0], vints[1])
435 if n == 3:
435 if n == 3:
436 return (vints[0], vints[1], vints[2])
436 return (vints[0], vints[1], vints[2])
437 if n == 4:
437 if n == 4:
438 return (vints[0], vints[1], vints[2], extra)
438 return (vints[0], vints[1], vints[2], extra)
439
439
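Beyond the doctests above, the usual consumer of this function is a minimum-version guard; a hedged sketch (the required version is arbitrary):

    from mercurial import util

    minor = util.versiontuple(n=2)
    # Unparsable components come back as None, so guard against them before
    # comparing (None does not order against ints on Python 3).
    if None not in minor and minor < (3, 6):
        raise util.Abort('this code assumes Mercurial 3.6 or later')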
440 # used by parsedate
440 # used by parsedate
441 defaultdateformats = (
441 defaultdateformats = (
442 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
442 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
443 '%Y-%m-%dT%H:%M', # without seconds
443 '%Y-%m-%dT%H:%M', # without seconds
444 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
444 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
445 '%Y-%m-%dT%H%M', # without seconds
445 '%Y-%m-%dT%H%M', # without seconds
446 '%Y-%m-%d %H:%M:%S', # our common legal variant
446 '%Y-%m-%d %H:%M:%S', # our common legal variant
447 '%Y-%m-%d %H:%M', # without seconds
447 '%Y-%m-%d %H:%M', # without seconds
448 '%Y-%m-%d %H%M%S', # without :
448 '%Y-%m-%d %H%M%S', # without :
449 '%Y-%m-%d %H%M', # without seconds
449 '%Y-%m-%d %H%M', # without seconds
450 '%Y-%m-%d %I:%M:%S%p',
450 '%Y-%m-%d %I:%M:%S%p',
451 '%Y-%m-%d %H:%M',
451 '%Y-%m-%d %H:%M',
452 '%Y-%m-%d %I:%M%p',
452 '%Y-%m-%d %I:%M%p',
453 '%Y-%m-%d',
453 '%Y-%m-%d',
454 '%m-%d',
454 '%m-%d',
455 '%m/%d',
455 '%m/%d',
456 '%m/%d/%y',
456 '%m/%d/%y',
457 '%m/%d/%Y',
457 '%m/%d/%Y',
458 '%a %b %d %H:%M:%S %Y',
458 '%a %b %d %H:%M:%S %Y',
459 '%a %b %d %I:%M:%S%p %Y',
459 '%a %b %d %I:%M:%S%p %Y',
460 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
460 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
461 '%b %d %H:%M:%S %Y',
461 '%b %d %H:%M:%S %Y',
462 '%b %d %I:%M:%S%p %Y',
462 '%b %d %I:%M:%S%p %Y',
463 '%b %d %H:%M:%S',
463 '%b %d %H:%M:%S',
464 '%b %d %I:%M:%S%p',
464 '%b %d %I:%M:%S%p',
465 '%b %d %H:%M',
465 '%b %d %H:%M',
466 '%b %d %I:%M%p',
466 '%b %d %I:%M%p',
467 '%b %d %Y',
467 '%b %d %Y',
468 '%b %d',
468 '%b %d',
469 '%H:%M:%S',
469 '%H:%M:%S',
470 '%I:%M:%S%p',
470 '%I:%M:%S%p',
471 '%H:%M',
471 '%H:%M',
472 '%I:%M%p',
472 '%I:%M%p',
473 )
473 )
474
474
475 extendeddateformats = defaultdateformats + (
475 extendeddateformats = defaultdateformats + (
476 "%Y",
476 "%Y",
477 "%Y-%m",
477 "%Y-%m",
478 "%b",
478 "%b",
479 "%b %Y",
479 "%b %Y",
480 )
480 )
481
481
482 def cachefunc(func):
482 def cachefunc(func):
483 '''cache the result of function calls'''
483 '''cache the result of function calls'''
484 # XXX doesn't handle keyword args
484 # XXX doesn't handle keyword args
485 if func.__code__.co_argcount == 0:
485 if func.__code__.co_argcount == 0:
486 cache = []
486 cache = []
487 def f():
487 def f():
488 if len(cache) == 0:
488 if len(cache) == 0:
489 cache.append(func())
489 cache.append(func())
490 return cache[0]
490 return cache[0]
491 return f
491 return f
492 cache = {}
492 cache = {}
493 if func.__code__.co_argcount == 1:
493 if func.__code__.co_argcount == 1:
494 # we gain a small amount of time because
494 # we gain a small amount of time because
495 # we don't need to pack/unpack the list
495 # we don't need to pack/unpack the list
496 def f(arg):
496 def f(arg):
497 if arg not in cache:
497 if arg not in cache:
498 cache[arg] = func(arg)
498 cache[arg] = func(arg)
499 return cache[arg]
499 return cache[arg]
500 else:
500 else:
501 def f(*args):
501 def f(*args):
502 if args not in cache:
502 if args not in cache:
503 cache[args] = func(*args)
503 cache[args] = func(*args)
504 return cache[args]
504 return cache[args]
505
505
506 return f
506 return f
507
507
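A minimal sketch of cachefunc used as a memoizing decorator (the wrapped function is made up for illustration); note the cache is unbounded and, per the XXX above, keyword arguments are not supported:

    from mercurial import util

    calls = []

    @util.cachefunc
    def expensive(path):
        calls.append(path)
        return len(path)

    expensive('a/b/c')
    expensive('a/b/c')
    assert calls == ['a/b/c']   # the second call was answered from the cache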
508 class sortdict(dict):
508 class sortdict(dict):
509 '''a simple dictionary that preserves insertion order'''
509 '''a simple dictionary that preserves insertion order'''
510 def __init__(self, data=None):
510 def __init__(self, data=None):
511 self._list = []
511 self._list = []
512 if data:
512 if data:
513 self.update(data)
513 self.update(data)
514 def copy(self):
514 def copy(self):
515 return sortdict(self)
515 return sortdict(self)
516 def __setitem__(self, key, val):
516 def __setitem__(self, key, val):
517 if key in self:
517 if key in self:
518 self._list.remove(key)
518 self._list.remove(key)
519 self._list.append(key)
519 self._list.append(key)
520 dict.__setitem__(self, key, val)
520 dict.__setitem__(self, key, val)
521 def __iter__(self):
521 def __iter__(self):
522 return self._list.__iter__()
522 return self._list.__iter__()
523 def update(self, src):
523 def update(self, src):
524 if isinstance(src, dict):
524 if isinstance(src, dict):
525 src = src.iteritems()
525 src = src.iteritems()
526 for k, v in src:
526 for k, v in src:
527 self[k] = v
527 self[k] = v
528 def clear(self):
528 def clear(self):
529 dict.clear(self)
529 dict.clear(self)
530 self._list = []
530 self._list = []
531 def items(self):
531 def items(self):
532 return [(k, self[k]) for k in self._list]
532 return [(k, self[k]) for k in self._list]
533 def __delitem__(self, key):
533 def __delitem__(self, key):
534 dict.__delitem__(self, key)
534 dict.__delitem__(self, key)
535 self._list.remove(key)
535 self._list.remove(key)
536 def pop(self, key, *args, **kwargs):
536 def pop(self, key, *args, **kwargs):
537 dict.pop(self, key, *args, **kwargs)
537 dict.pop(self, key, *args, **kwargs)
538 try:
538 try:
539 self._list.remove(key)
539 self._list.remove(key)
540 except ValueError:
540 except ValueError:
541 pass
541 pass
542 def keys(self):
542 def keys(self):
543 return self._list
543 return self._list
544 def iterkeys(self):
544 def iterkeys(self):
545 return self._list.__iter__()
545 return self._list.__iter__()
546 def iteritems(self):
546 def iteritems(self):
547 for k in self._list:
547 for k in self._list:
548 yield k, self[k]
548 yield k, self[k]
549 def insert(self, index, key, val):
549 def insert(self, index, key, val):
550 self._list.insert(index, key)
550 self._list.insert(index, key)
551 dict.__setitem__(self, key, val)
551 dict.__setitem__(self, key, val)
552 def __repr__(self):
552 def __repr__(self):
553 if not self:
553 if not self:
554 return '%s()' % self.__class__.__name__
554 return '%s()' % self.__class__.__name__
555 return '%s(%r)' % (self.__class__.__name__, self.items())
555 return '%s(%r)' % (self.__class__.__name__, self.items())
556
556
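A short sketch of the ordering contract (assuming mercurial.util is importable): iteration follows insertion order, and re-assigning an existing key moves it to the end rather than keeping its original slot:

    from mercurial import util

    d = util.sortdict()
    d['b'] = 1
    d['a'] = 2
    assert list(d) == ['b', 'a']          # insertion order, not key order

    d['b'] = 3                            # re-setting moves 'b' to the end
    assert d.items() == [('a', 2), ('b', 3)]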
557 class _lrucachenode(object):
557 class _lrucachenode(object):
558 """A node in a doubly linked list.
558 """A node in a doubly linked list.
559
559
560 Holds a reference to nodes on either side as well as a key-value
560 Holds a reference to nodes on either side as well as a key-value
561 pair for the dictionary entry.
561 pair for the dictionary entry.
562 """
562 """
563 __slots__ = (u'next', u'prev', u'key', u'value')
563 __slots__ = (u'next', u'prev', u'key', u'value')
564
564
565 def __init__(self):
565 def __init__(self):
566 self.next = None
566 self.next = None
567 self.prev = None
567 self.prev = None
568
568
569 self.key = _notset
569 self.key = _notset
570 self.value = None
570 self.value = None
571
571
572 def markempty(self):
572 def markempty(self):
573 """Mark the node as emptied."""
573 """Mark the node as emptied."""
574 self.key = _notset
574 self.key = _notset
575
575
576 class lrucachedict(object):
576 class lrucachedict(object):
577 """Dict that caches most recent accesses and sets.
577 """Dict that caches most recent accesses and sets.
578
578
579 The dict consists of an actual backing dict - indexed by original
579 The dict consists of an actual backing dict - indexed by original
580 key - and a doubly linked circular list defining the order of entries in
580 key - and a doubly linked circular list defining the order of entries in
581 the cache.
581 the cache.
582
582
583 The head node is the newest entry in the cache. If the cache is full,
583 The head node is the newest entry in the cache. If the cache is full,
584 we recycle head.prev and make it the new head. Cache accesses result in
584 we recycle head.prev and make it the new head. Cache accesses result in
585 the node being moved to before the existing head and being marked as the
585 the node being moved to before the existing head and being marked as the
586 new head node.
586 new head node.
587 """
587 """
588 def __init__(self, max):
588 def __init__(self, max):
589 self._cache = {}
589 self._cache = {}
590
590
591 self._head = head = _lrucachenode()
591 self._head = head = _lrucachenode()
592 head.prev = head
592 head.prev = head
593 head.next = head
593 head.next = head
594 self._size = 1
594 self._size = 1
595 self._capacity = max
595 self._capacity = max
596
596
597 def __len__(self):
597 def __len__(self):
598 return len(self._cache)
598 return len(self._cache)
599
599
600 def __contains__(self, k):
600 def __contains__(self, k):
601 return k in self._cache
601 return k in self._cache
602
602
603 def __iter__(self):
603 def __iter__(self):
604 # We don't have to iterate in cache order, but why not.
604 # We don't have to iterate in cache order, but why not.
605 n = self._head
605 n = self._head
606 for i in range(len(self._cache)):
606 for i in range(len(self._cache)):
607 yield n.key
607 yield n.key
608 n = n.next
608 n = n.next
609
609
610 def __getitem__(self, k):
610 def __getitem__(self, k):
611 node = self._cache[k]
611 node = self._cache[k]
612 self._movetohead(node)
612 self._movetohead(node)
613 return node.value
613 return node.value
614
614
615 def __setitem__(self, k, v):
615 def __setitem__(self, k, v):
616 node = self._cache.get(k)
616 node = self._cache.get(k)
617 # Replace existing value and mark as newest.
617 # Replace existing value and mark as newest.
618 if node is not None:
618 if node is not None:
619 node.value = v
619 node.value = v
620 self._movetohead(node)
620 self._movetohead(node)
621 return
621 return
622
622
623 if self._size < self._capacity:
623 if self._size < self._capacity:
624 node = self._addcapacity()
624 node = self._addcapacity()
625 else:
625 else:
626 # Grab the last/oldest item.
626 # Grab the last/oldest item.
627 node = self._head.prev
627 node = self._head.prev
628
628
629 # At capacity. Kill the old entry.
629 # At capacity. Kill the old entry.
630 if node.key is not _notset:
630 if node.key is not _notset:
631 del self._cache[node.key]
631 del self._cache[node.key]
632
632
633 node.key = k
633 node.key = k
634 node.value = v
634 node.value = v
635 self._cache[k] = node
635 self._cache[k] = node
636 # And mark it as newest entry. No need to adjust order since it
636 # And mark it as newest entry. No need to adjust order since it
637 # is already self._head.prev.
637 # is already self._head.prev.
638 self._head = node
638 self._head = node
639
639
640 def __delitem__(self, k):
640 def __delitem__(self, k):
641 node = self._cache.pop(k)
641 node = self._cache.pop(k)
642 node.markempty()
642 node.markempty()
643
643
644 # Temporarily mark as newest item before re-adjusting head to make
644 # Temporarily mark as newest item before re-adjusting head to make
645 # this node the oldest item.
645 # this node the oldest item.
646 self._movetohead(node)
646 self._movetohead(node)
647 self._head = node.next
647 self._head = node.next
648
648
649 # Additional dict methods.
649 # Additional dict methods.
650
650
651 def get(self, k, default=None):
651 def get(self, k, default=None):
652 try:
652 try:
653 return self._cache[k].value
653 return self._cache[k].value
654 except KeyError:
654 except KeyError:
655 return default
655 return default
656
656
657 def clear(self):
657 def clear(self):
658 n = self._head
658 n = self._head
659 while n.key is not _notset:
659 while n.key is not _notset:
660 n.markempty()
660 n.markempty()
661 n = n.next
661 n = n.next
662
662
663 self._cache.clear()
663 self._cache.clear()
664
664
665 def copy(self):
665 def copy(self):
666 result = lrucachedict(self._capacity)
666 result = lrucachedict(self._capacity)
667 n = self._head.prev
667 n = self._head.prev
668 # Iterate in oldest-to-newest order, so the copy has the right ordering
668 # Iterate in oldest-to-newest order, so the copy has the right ordering
669 for i in range(len(self._cache)):
669 for i in range(len(self._cache)):
670 result[n.key] = n.value
670 result[n.key] = n.value
671 n = n.prev
671 n = n.prev
672 return result
672 return result
673
673
674 def _movetohead(self, node):
674 def _movetohead(self, node):
675 """Mark a node as the newest, making it the new head.
675 """Mark a node as the newest, making it the new head.
676
676
677 When a node is accessed, it becomes the freshest entry in the LRU
677 When a node is accessed, it becomes the freshest entry in the LRU
678 list, which is denoted by self._head.
678 list, which is denoted by self._head.
679
679
680 Visually, let's make ``N`` the new head node (* denotes head):
680 Visually, let's make ``N`` the new head node (* denotes head):
681
681
682 previous/oldest <-> head <-> next/next newest
682 previous/oldest <-> head <-> next/next newest
683
683
684 ----<->--- A* ---<->-----
684 ----<->--- A* ---<->-----
685 | |
685 | |
686 E <-> D <-> N <-> C <-> B
686 E <-> D <-> N <-> C <-> B
687
687
688 To:
688 To:
689
689
690 ----<->--- N* ---<->-----
690 ----<->--- N* ---<->-----
691 | |
691 | |
692 E <-> D <-> C <-> B <-> A
692 E <-> D <-> C <-> B <-> A
693
693
694 This requires the following moves:
694 This requires the following moves:
695
695
696 C.next = D (node.prev.next = node.next)
696 C.next = D (node.prev.next = node.next)
697 D.prev = C (node.next.prev = node.prev)
697 D.prev = C (node.next.prev = node.prev)
698 E.next = N (head.prev.next = node)
698 E.next = N (head.prev.next = node)
699 N.prev = E (node.prev = head.prev)
699 N.prev = E (node.prev = head.prev)
700 N.next = A (node.next = head)
700 N.next = A (node.next = head)
701 A.prev = N (head.prev = node)
701 A.prev = N (head.prev = node)
702 """
702 """
703 head = self._head
703 head = self._head
704 # C.next = D
704 # C.next = D
705 node.prev.next = node.next
705 node.prev.next = node.next
706 # D.prev = C
706 # D.prev = C
707 node.next.prev = node.prev
707 node.next.prev = node.prev
708 # N.prev = E
708 # N.prev = E
709 node.prev = head.prev
709 node.prev = head.prev
710 # N.next = A
710 # N.next = A
711 # It is tempting to do just "head" here, however if node is
711 # It is tempting to do just "head" here, however if node is
712 # adjacent to head, this will do bad things.
712 # adjacent to head, this will do bad things.
713 node.next = head.prev.next
713 node.next = head.prev.next
714 # E.next = N
714 # E.next = N
715 node.next.prev = node
715 node.next.prev = node
716 # A.prev = N
716 # A.prev = N
717 node.prev.next = node
717 node.prev.next = node
718
718
719 self._head = node
719 self._head = node
720
720
721 def _addcapacity(self):
721 def _addcapacity(self):
722 """Add a node to the circular linked list.
722 """Add a node to the circular linked list.
723
723
724 The new node is inserted before the head node.
724 The new node is inserted before the head node.
725 """
725 """
726 head = self._head
726 head = self._head
727 node = _lrucachenode()
727 node = _lrucachenode()
728 head.prev.next = node
728 head.prev.next = node
729 node.prev = head.prev
729 node.prev = head.prev
730 node.next = head
730 node.next = head
731 head.prev = node
731 head.prev = node
732 self._size += 1
732 self._size += 1
733 return node
733 return node
734
734
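A sketch of the resulting behaviour (assumes mercurial.util): once the capacity is reached, inserting a new key recycles the oldest node, and plain reads count as use for eviction purposes:

    from mercurial import util

    cache = util.lrucachedict(2)
    cache['a'] = 1
    cache['b'] = 2
    cache['a']                  # touching 'a' makes it the most recently used
    cache['c'] = 3              # evicts 'b', the least recently used entry

    assert 'a' in cache and 'c' in cache
    assert 'b' not in cache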
735 def lrucachefunc(func):
735 def lrucachefunc(func):
736 '''cache most recent results of function calls'''
736 '''cache most recent results of function calls'''
737 cache = {}
737 cache = {}
738 order = collections.deque()
738 order = collections.deque()
739 if func.__code__.co_argcount == 1:
739 if func.__code__.co_argcount == 1:
740 def f(arg):
740 def f(arg):
741 if arg not in cache:
741 if arg not in cache:
742 if len(cache) > 20:
742 if len(cache) > 20:
743 del cache[order.popleft()]
743 del cache[order.popleft()]
744 cache[arg] = func(arg)
744 cache[arg] = func(arg)
745 else:
745 else:
746 order.remove(arg)
746 order.remove(arg)
747 order.append(arg)
747 order.append(arg)
748 return cache[arg]
748 return cache[arg]
749 else:
749 else:
750 def f(*args):
750 def f(*args):
751 if args not in cache:
751 if args not in cache:
752 if len(cache) > 20:
752 if len(cache) > 20:
753 del cache[order.popleft()]
753 del cache[order.popleft()]
754 cache[args] = func(*args)
754 cache[args] = func(*args)
755 else:
755 else:
756 order.remove(args)
756 order.remove(args)
757 order.append(args)
757 order.append(args)
758 return cache[args]
758 return cache[args]
759
759
760 return f
760 return f
761
761
762 class propertycache(object):
762 class propertycache(object):
763 def __init__(self, func):
763 def __init__(self, func):
764 self.func = func
764 self.func = func
765 self.name = func.__name__
765 self.name = func.__name__
766 def __get__(self, obj, type=None):
766 def __get__(self, obj, type=None):
767 result = self.func(obj)
767 result = self.func(obj)
768 self.cachevalue(obj, result)
768 self.cachevalue(obj, result)
769 return result
769 return result
770
770
771 def cachevalue(self, obj, value):
771 def cachevalue(self, obj, value):
772 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
772 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
773 obj.__dict__[self.name] = value
773 obj.__dict__[self.name] = value
774
774
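A small sketch of the lazy-attribute pattern this descriptor implements (the class below is hypothetical): the function runs once, its result is stored in the instance __dict__, and later lookups bypass the descriptor entirely until the cached entry is deleted:

    from mercurial import util

    class repoish(object):
        @util.propertycache
        def expensive(self):
            print('computing...')
            return 42

    r = repoish()
    r.expensive                   # prints 'computing...' once, caches 42
    r.expensive                   # served straight from r.__dict__
    del r.__dict__['expensive']   # invalidate: the next access recomputes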
775 def pipefilter(s, cmd):
775 def pipefilter(s, cmd):
776 '''filter string S through command CMD, returning its output'''
776 '''filter string S through command CMD, returning its output'''
777 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
777 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
778 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
778 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
779 pout, perr = p.communicate(s)
779 pout, perr = p.communicate(s)
780 return pout
780 return pout
781
781
782 def tempfilter(s, cmd):
782 def tempfilter(s, cmd):
783 '''filter string S through a pair of temporary files with CMD.
783 '''filter string S through a pair of temporary files with CMD.
784 CMD is used as a template to create the real command to be run,
784 CMD is used as a template to create the real command to be run,
785 with the strings INFILE and OUTFILE replaced by the real names of
785 with the strings INFILE and OUTFILE replaced by the real names of
786 the temporary files generated.'''
786 the temporary files generated.'''
787 inname, outname = None, None
787 inname, outname = None, None
788 try:
788 try:
789 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
789 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
790 fp = os.fdopen(infd, 'wb')
790 fp = os.fdopen(infd, 'wb')
791 fp.write(s)
791 fp.write(s)
792 fp.close()
792 fp.close()
793 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
793 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
794 os.close(outfd)
794 os.close(outfd)
795 cmd = cmd.replace('INFILE', inname)
795 cmd = cmd.replace('INFILE', inname)
796 cmd = cmd.replace('OUTFILE', outname)
796 cmd = cmd.replace('OUTFILE', outname)
797 code = os.system(cmd)
797 code = os.system(cmd)
798 if pycompat.sysplatform == 'OpenVMS' and code & 1:
798 if pycompat.sysplatform == 'OpenVMS' and code & 1:
799 code = 0
799 code = 0
800 if code:
800 if code:
801 raise Abort(_("command '%s' failed: %s") %
801 raise Abort(_("command '%s' failed: %s") %
802 (cmd, explainexit(code)))
802 (cmd, explainexit(code)))
803 return readfile(outname)
803 return readfile(outname)
804 finally:
804 finally:
805 try:
805 try:
806 if inname:
806 if inname:
807 os.unlink(inname)
807 os.unlink(inname)
808 except OSError:
808 except OSError:
809 pass
809 pass
810 try:
810 try:
811 if outname:
811 if outname:
812 os.unlink(outname)
812 os.unlink(outname)
813 except OSError:
813 except OSError:
814 pass
814 pass
815
815
816 filtertable = {
816 filtertable = {
817 'tempfile:': tempfilter,
817 'tempfile:': tempfilter,
818 'pipe:': pipefilter,
818 'pipe:': pipefilter,
819 }
819 }
820
820
821 def filter(s, cmd):
821 def filter(s, cmd):
822 "filter a string through a command that transforms its input to its output"
822 "filter a string through a command that transforms its input to its output"
823 for name, fn in filtertable.iteritems():
823 for name, fn in filtertable.iteritems():
824 if cmd.startswith(name):
824 if cmd.startswith(name):
825 return fn(s, cmd[len(name):].lstrip())
825 return fn(s, cmd[len(name):].lstrip())
826 return pipefilter(s, cmd)
826 return pipefilter(s, cmd)
827
827
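A sketch of the two filter flavours (Python 2 string semantics and POSIX commands assumed; tr and sed stand in for whatever a configuration would name): 'pipe:' streams through stdin/stdout, while 'tempfile:' is for commands that need real file arguments:

    from mercurial import util

    # Streamed through a shell pipe; same as util.pipefilter(s, 'tr a-z A-Z').
    print(util.filter('hello\n', 'pipe: tr a-z A-Z'))

    # Run via temporary files; INFILE and OUTFILE are replaced with real paths.
    print(util.filter('hello\n', 'tempfile: sed s/hello/goodbye/ INFILE > OUTFILE'))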
828 def binary(s):
828 def binary(s):
829 """return true if a string is binary data"""
829 """return true if a string is binary data"""
830 return bool(s and '\0' in s)
830 return bool(s and '\0' in s)
831
831
832 def increasingchunks(source, min=1024, max=65536):
832 def increasingchunks(source, min=1024, max=65536):
833 '''return no less than min bytes per chunk while data remains,
833 '''return no less than min bytes per chunk while data remains,
834 doubling min after each chunk until it reaches max'''
834 doubling min after each chunk until it reaches max'''
835 def log2(x):
835 def log2(x):
836 if not x:
836 if not x:
837 return 0
837 return 0
838 i = 0
838 i = 0
839 while x:
839 while x:
840 x >>= 1
840 x >>= 1
841 i += 1
841 i += 1
842 return i - 1
842 return i - 1
843
843
844 buf = []
844 buf = []
845 blen = 0
845 blen = 0
846 for chunk in source:
846 for chunk in source:
847 buf.append(chunk)
847 buf.append(chunk)
848 blen += len(chunk)
848 blen += len(chunk)
849 if blen >= min:
849 if blen >= min:
850 if min < max:
850 if min < max:
851 min = min << 1
851 min = min << 1
852 nmin = 1 << log2(blen)
852 nmin = 1 << log2(blen)
853 if nmin > min:
853 if nmin > min:
854 min = nmin
854 min = nmin
855 if min > max:
855 if min > max:
856 min = max
856 min = max
857 yield ''.join(buf)
857 yield ''.join(buf)
858 blen = 0
858 blen = 0
859 buf = []
859 buf = []
860 if buf:
860 if buf:
861 yield ''.join(buf)
861 yield ''.join(buf)
862
862
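A sketch of the chunk-growth behaviour: many small input chunks are coalesced into progressively larger output chunks (capped at max), which keeps the number of downstream write() calls low:

    from mercurial import util

    source = ('x' * 100 for _ in range(100))    # 100 chunks of 100 bytes
    sizes = [len(c) for c in util.increasingchunks(source, min=1024, max=4096)]
    # yields [1100, 2100, 4100, 2700]: chunks grow toward max, remainder last
    print(sizes)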
863 Abort = error.Abort
863 Abort = error.Abort
864
864
865 def always(fn):
865 def always(fn):
866 return True
866 return True
867
867
868 def never(fn):
868 def never(fn):
869 return False
869 return False
870
870
871 def nogc(func):
871 def nogc(func):
872 """disable garbage collector
872 """disable garbage collector
873
873
874 Python's garbage collector triggers a GC each time a certain number of
874 Python's garbage collector triggers a GC each time a certain number of
875 container objects (the number being defined by gc.get_threshold()) are
875 container objects (the number being defined by gc.get_threshold()) are
876 allocated even when marked not to be tracked by the collector. Tracking has
876 allocated even when marked not to be tracked by the collector. Tracking has
877 no effect on when GCs are triggered, only on what objects the GC looks
877 no effect on when GCs are triggered, only on what objects the GC looks
878 into. As a workaround, disable GC while building complex (huge)
878 into. As a workaround, disable GC while building complex (huge)
879 containers.
879 containers.
880
880
881 This garbage collector issue has been fixed in 2.7.
881 This garbage collector issue has been fixed in 2.7.
882 """
882 """
883 if sys.version_info >= (2, 7):
883 if sys.version_info >= (2, 7):
884 return func
884 return func
885 def wrapper(*args, **kwargs):
885 def wrapper(*args, **kwargs):
886 gcenabled = gc.isenabled()
886 gcenabled = gc.isenabled()
887 gc.disable()
887 gc.disable()
888 try:
888 try:
889 return func(*args, **kwargs)
889 return func(*args, **kwargs)
890 finally:
890 finally:
891 if gcenabled:
891 if gcenabled:
892 gc.enable()
892 gc.enable()
893 return wrapper
893 return wrapper
894
894
895 def pathto(root, n1, n2):
895 def pathto(root, n1, n2):
896 '''return the relative path from one place to another.
896 '''return the relative path from one place to another.
897 root should use os.sep to separate directories
897 root should use os.sep to separate directories
898 n1 should use os.sep to separate directories
898 n1 should use os.sep to separate directories
899 n2 should use "/" to separate directories
899 n2 should use "/" to separate directories
900 returns an os.sep-separated path.
900 returns an os.sep-separated path.
901
901
902 If n1 is a relative path, it's assumed it's
902 If n1 is a relative path, it's assumed it's
903 relative to root.
903 relative to root.
904 n2 should always be relative to root.
904 n2 should always be relative to root.
905 '''
905 '''
906 if not n1:
906 if not n1:
907 return localpath(n2)
907 return localpath(n2)
908 if os.path.isabs(n1):
908 if os.path.isabs(n1):
909 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
909 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
910 return os.path.join(root, localpath(n2))
910 return os.path.join(root, localpath(n2))
911 n2 = '/'.join((pconvert(root), n2))
911 n2 = '/'.join((pconvert(root), n2))
912 a, b = splitpath(n1), n2.split('/')
912 a, b = splitpath(n1), n2.split('/')
913 a.reverse()
913 a.reverse()
914 b.reverse()
914 b.reverse()
915 while a and b and a[-1] == b[-1]:
915 while a and b and a[-1] == b[-1]:
916 a.pop()
916 a.pop()
917 b.pop()
917 b.pop()
918 b.reverse()
918 b.reverse()
919 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
919 return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
920
920
921 def mainfrozen():
921 def mainfrozen():
922 """return True if we are a frozen executable.
922 """return True if we are a frozen executable.
923
923
924 The code supports py2exe (most common, Windows only) and tools/freeze
924 The code supports py2exe (most common, Windows only) and tools/freeze
925 (portable, not much used).
925 (portable, not much used).
926 """
926 """
927 return (safehasattr(sys, "frozen") or # new py2exe
927 return (safehasattr(sys, "frozen") or # new py2exe
928 safehasattr(sys, "importers") or # old py2exe
928 safehasattr(sys, "importers") or # old py2exe
929 imp.is_frozen(u"__main__")) # tools/freeze
929 imp.is_frozen(u"__main__")) # tools/freeze
930
930
931 # the location of data files matching the source code
931 # the location of data files matching the source code
932 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
932 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
933 # executable version (py2exe) doesn't support __file__
933 # executable version (py2exe) doesn't support __file__
934 datapath = os.path.dirname(sys.executable)
934 datapath = os.path.dirname(pycompat.sysexecutable)
935 else:
935 else:
936 datapath = os.path.dirname(__file__)
936 datapath = os.path.dirname(__file__)
937
937
938 if not isinstance(datapath, bytes):
938 if not isinstance(datapath, bytes):
939 datapath = pycompat.fsencode(datapath)
939 datapath = pycompat.fsencode(datapath)
940
940
941 i18n.setdatapath(datapath)
941 i18n.setdatapath(datapath)
942
942
943 _hgexecutable = None
943 _hgexecutable = None
944
944
945 def hgexecutable():
945 def hgexecutable():
946 """return location of the 'hg' executable.
946 """return location of the 'hg' executable.
947
947
948 Defaults to $HG or 'hg' in the search path.
948 Defaults to $HG or 'hg' in the search path.
949 """
949 """
950 if _hgexecutable is None:
950 if _hgexecutable is None:
951 hg = encoding.environ.get('HG')
951 hg = encoding.environ.get('HG')
952 mainmod = sys.modules['__main__']
952 mainmod = sys.modules['__main__']
953 if hg:
953 if hg:
954 _sethgexecutable(hg)
954 _sethgexecutable(hg)
955 elif mainfrozen():
955 elif mainfrozen():
956 if getattr(sys, 'frozen', None) == 'macosx_app':
956 if getattr(sys, 'frozen', None) == 'macosx_app':
957 # Env variable set by py2app
957 # Env variable set by py2app
958 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
958 _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
959 else:
959 else:
960 _sethgexecutable(sys.executable)
960 _sethgexecutable(pycompat.sysexecutable)
961 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
961 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
962 _sethgexecutable(mainmod.__file__)
962 _sethgexecutable(mainmod.__file__)
963 else:
963 else:
964 exe = findexe('hg') or os.path.basename(sys.argv[0])
964 exe = findexe('hg') or os.path.basename(sys.argv[0])
965 _sethgexecutable(exe)
965 _sethgexecutable(exe)
966 return _hgexecutable
966 return _hgexecutable
967
967
968 def _sethgexecutable(path):
968 def _sethgexecutable(path):
969 """set location of the 'hg' executable"""
969 """set location of the 'hg' executable"""
970 global _hgexecutable
970 global _hgexecutable
971 _hgexecutable = path
971 _hgexecutable = path
972
972
973 def _isstdout(f):
973 def _isstdout(f):
974 fileno = getattr(f, 'fileno', None)
974 fileno = getattr(f, 'fileno', None)
975 return fileno and fileno() == sys.__stdout__.fileno()
975 return fileno and fileno() == sys.__stdout__.fileno()
976
976
977 def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
977 def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
978 '''enhanced shell command execution.
978 '''enhanced shell command execution.
979 run with environment maybe modified, maybe in different dir.
979 run with environment maybe modified, maybe in different dir.
980
980
981 if command fails and onerr is None, return status, else raise onerr
981 if command fails and onerr is None, return status, else raise onerr
982 object as exception.
982 object as exception.
983
983
984 if out is specified, it is assumed to be a file-like object that has a
984 if out is specified, it is assumed to be a file-like object that has a
985 write() method. stdout and stderr will be redirected to out.'''
985 write() method. stdout and stderr will be redirected to out.'''
986 if environ is None:
986 if environ is None:
987 environ = {}
987 environ = {}
988 try:
988 try:
989 stdout.flush()
989 stdout.flush()
990 except Exception:
990 except Exception:
991 pass
991 pass
992 def py2shell(val):
992 def py2shell(val):
993 'convert python object into string that is useful to shell'
993 'convert python object into string that is useful to shell'
994 if val is None or val is False:
994 if val is None or val is False:
995 return '0'
995 return '0'
996 if val is True:
996 if val is True:
997 return '1'
997 return '1'
998 return str(val)
998 return str(val)
999 origcmd = cmd
999 origcmd = cmd
1000 cmd = quotecommand(cmd)
1000 cmd = quotecommand(cmd)
1001 if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
1001 if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
1002 and sys.version_info[1] < 7):
1002 and sys.version_info[1] < 7):
1003 # subprocess kludge to work around issues in half-baked Python
1003 # subprocess kludge to work around issues in half-baked Python
1004 # ports, notably bichued/python:
1004 # ports, notably bichued/python:
1005 if cwd is not None:
1005 if cwd is not None:
1006 os.chdir(cwd)
1006 os.chdir(cwd)
1007 rc = os.system(cmd)
1007 rc = os.system(cmd)
1008 else:
1008 else:
1009 env = dict(encoding.environ)
1009 env = dict(encoding.environ)
1010 env.update((k, py2shell(v)) for k, v in environ.iteritems())
1010 env.update((k, py2shell(v)) for k, v in environ.iteritems())
1011 env['HG'] = hgexecutable()
1011 env['HG'] = hgexecutable()
1012 if out is None or _isstdout(out):
1012 if out is None or _isstdout(out):
1013 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
1013 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
1014 env=env, cwd=cwd)
1014 env=env, cwd=cwd)
1015 else:
1015 else:
1016 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1016 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
1017 env=env, cwd=cwd, stdout=subprocess.PIPE,
1017 env=env, cwd=cwd, stdout=subprocess.PIPE,
1018 stderr=subprocess.STDOUT)
1018 stderr=subprocess.STDOUT)
1019 for line in iter(proc.stdout.readline, ''):
1019 for line in iter(proc.stdout.readline, ''):
1020 out.write(line)
1020 out.write(line)
1021 proc.wait()
1021 proc.wait()
1022 rc = proc.returncode
1022 rc = proc.returncode
1023 if pycompat.sysplatform == 'OpenVMS' and rc & 1:
1023 if pycompat.sysplatform == 'OpenVMS' and rc & 1:
1024 rc = 0
1024 rc = 0
1025 if rc and onerr:
1025 if rc and onerr:
1026 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
1026 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
1027 explainexit(rc)[0])
1027 explainexit(rc)[0])
1028 if errprefix:
1028 if errprefix:
1029 errmsg = '%s: %s' % (errprefix, errmsg)
1029 errmsg = '%s: %s' % (errprefix, errmsg)
1030 raise onerr(errmsg)
1030 raise onerr(errmsg)
1031 return rc
1031 return rc
1032
1032
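A sketch of the three common calling patterns (Python 2 semantics and a POSIX shell assumed; the 'sync' error prefix is made up): plain status return, output captured into a file-like object, and a non-zero exit turned into an exception via onerr:

    from mercurial import error, util

    print(util.system('true'))                 # returns the exit status (0)

    buf = util.stringio()
    util.system('echo captured', out=buf)      # stdout/stderr redirected to out
    print(buf.getvalue())

    try:
        util.system('false', onerr=error.Abort, errprefix='sync')
    except error.Abort as inst:
        print(inst)            # e.g. "sync: false exited with status 1"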
1033 def checksignature(func):
1033 def checksignature(func):
1034 '''wrap a function with code to check for calling errors'''
1034 '''wrap a function with code to check for calling errors'''
1035 def check(*args, **kwargs):
1035 def check(*args, **kwargs):
1036 try:
1036 try:
1037 return func(*args, **kwargs)
1037 return func(*args, **kwargs)
1038 except TypeError:
1038 except TypeError:
1039 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1039 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
1040 raise error.SignatureError
1040 raise error.SignatureError
1041 raise
1041 raise
1042
1042
1043 return check
1043 return check
1044
1044
1045 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1045 def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
1046 '''copy a file, preserving mode and optionally other stat info like
1046 '''copy a file, preserving mode and optionally other stat info like
1047 atime/mtime
1047 atime/mtime
1048
1048
1049 checkambig argument is used with filestat, and is useful only if
1049 checkambig argument is used with filestat, and is useful only if
1050 destination file is guarded by any lock (e.g. repo.lock or
1050 destination file is guarded by any lock (e.g. repo.lock or
1051 repo.wlock).
1051 repo.wlock).
1052
1052
1053 copystat and checkambig should be exclusive.
1053 copystat and checkambig should be exclusive.
1054 '''
1054 '''
1055 assert not (copystat and checkambig)
1055 assert not (copystat and checkambig)
1056 oldstat = None
1056 oldstat = None
1057 if os.path.lexists(dest):
1057 if os.path.lexists(dest):
1058 if checkambig:
1058 if checkambig:
1059 oldstat = checkambig and filestat(dest)
1059 oldstat = checkambig and filestat(dest)
1060 unlink(dest)
1060 unlink(dest)
1061 # hardlinks are problematic on CIFS, quietly ignore this flag
1061 # hardlinks are problematic on CIFS, quietly ignore this flag
1062 # until we find a way to work around it cleanly (issue4546)
1062 # until we find a way to work around it cleanly (issue4546)
1063 if False and hardlink:
1063 if False and hardlink:
1064 try:
1064 try:
1065 oslink(src, dest)
1065 oslink(src, dest)
1066 return
1066 return
1067 except (IOError, OSError):
1067 except (IOError, OSError):
1068 pass # fall back to normal copy
1068 pass # fall back to normal copy
1069 if os.path.islink(src):
1069 if os.path.islink(src):
1070 os.symlink(os.readlink(src), dest)
1070 os.symlink(os.readlink(src), dest)
1071 # copytime is ignored for symlinks, but in general copytime isn't needed
1071 # copytime is ignored for symlinks, but in general copytime isn't needed
1072 # for them anyway
1072 # for them anyway
1073 else:
1073 else:
1074 try:
1074 try:
1075 shutil.copyfile(src, dest)
1075 shutil.copyfile(src, dest)
1076 if copystat:
1076 if copystat:
1077 # copystat also copies mode
1077 # copystat also copies mode
1078 shutil.copystat(src, dest)
1078 shutil.copystat(src, dest)
1079 else:
1079 else:
1080 shutil.copymode(src, dest)
1080 shutil.copymode(src, dest)
1081 if oldstat and oldstat.stat:
1081 if oldstat and oldstat.stat:
1082 newstat = filestat(dest)
1082 newstat = filestat(dest)
1083 if newstat.isambig(oldstat):
1083 if newstat.isambig(oldstat):
1084 # stat of copied file is ambiguous to original one
1084 # stat of copied file is ambiguous to original one
1085 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1085 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1086 os.utime(dest, (advanced, advanced))
1086 os.utime(dest, (advanced, advanced))
1087 except shutil.Error as inst:
1087 except shutil.Error as inst:
1088 raise Abort(str(inst))
1088 raise Abort(str(inst))
1089
1089
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    if hardlink:
        topic = _('linking')
    else:
        topic = _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num

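# Usage sketch (not taken from this module; the 'ui' object and the paths
# are assumptions for the example only): copy a store directory while
# reporting progress through a callback.
#
#   def prog(topic, pos):
#       ui.progress(topic, pos, unit='files')
#   hardlinked, copied = copyfiles('src/.hg/store', 'dst/.hg/store',
#                                  progress=prog)
#   ui.status('%d files copied (hardlinks: %s)\n' % (copied, hardlinked))
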
_winreservednames = '''con prn aux nul
    com1 com2 com3 com4 com5 com6 com7 com8 com9
    lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in n:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t

if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError as why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    b2 = b.upper()
    if b == b2:
        b2 = b.lower()
        if b == b2:
            return True # no evidence against case sensitivity
    p2 = os.path.join(d, b2)
    try:
        s2 = os.lstat(p2)
        if s2 == s1:
            return False
        return True
    except OSError:
        return True

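# Usage sketch (path is illustrative): callers typically probe the '.hg'
# directory itself, since it always exists and contains letters that can
# be case-folded.
#
#   if not fscasesensitive('/path/to/repo/.hg'):
#       # fall back to the normcase()/fspath() machinery below
#       ...
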
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False

class _re(object):
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

re = _re()

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    seps.replace('\\','\\\\')
    pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk", which
            # may take place for each patch of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)

def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        try:
            os.unlink(f1)
        except OSError:
            pass
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass

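# Usage sketch (the store path is an assumption for the example): probe a
# file inside the repository store before deciding to publish hardlinks.
#
#   if not checknlink('/path/to/repo/.hg/store/00changelog.i'):
#       hardlink = False   # nlink reporting is unreliable here; copy instead
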
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return (path.endswith(pycompat.ossep)
            or pycompat.osaltsep and path.endswith(pycompat.osaltsep))

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative to a simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(pycompat.ossep)

def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform == 'darwin':
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp

class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, path):
        try:
            self.stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            self.stat = None

    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime  < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        changes caused by collisions between such mtimes.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'.
        """
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return
            raise

    def __ne__(self, other):
        return not self == other

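# Sketch of the intended write/check cycle (the path is illustrative, not
# from this module):
#
#   old = filestat('.hg/dirstate')       # stat before rewriting in place
#   ... rewrite .hg/dirstate ...
#   new = filestat('.hg/dirstate')
#   if new.isambig(old):                 # same ctime: mtime comparison unsafe
#       new.avoidambig('.hg/dirstate', old)   # bump mtime by one second
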
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        if exctype is not None:
            self.discard()
        else:
            self.close()

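# Usage sketch (file name and payload are illustrative): the context manager
# renames the temporary copy over the target on success and discards it on
# error, so readers never observe a partially written file.
#
#   with atomictempfile('.hg/requires', 'wb') as fp:
#       fp.write('revlogv1\n')
#       fp.write('store\n')
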
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)

def readfile(path):
    with open(path, 'rb') as fp:
        return fp.read()

def writefile(path, text):
    with open(path, 'wb') as fp:
        fp.write(text)

def appendfile(path, text):
    with open(path, 'ab') as fp:
        fp.write(text)

class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)

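# Usage sketch: re-chunk an iterator of unevenly sized strings into
# fixed-size reads (the values below follow from the read() logic above).
#
#   buf = chunkbuffer(iter(['abc', 'defghi', 'jk']))
#   buf.read(4)     # -> 'abcd'
#   buf.read(100)   # -> 'efghijk' (iterator ran dry, so fewer bytes)
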
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file, size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

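# Usage sketch (paths are illustrative): stream one file into another
# without holding the whole content in memory.
#
#   with open('big.bundle', 'rb') as src, open('copy.bundle', 'wb') as dst:
#       for chunk in filechunkiter(src, size=65536):
#           dst.write(chunk)
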
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    delta = (datetime.datetime.utcfromtimestamp(timestamp) -
             datetime.datetime.fromtimestamp(timestamp))
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    d = t - tz
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = t.strftime(format)
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair"""

    if s.endswith("GMT") or s.endswith("UTC"):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = (s[-5] == "+") and 1 or -1
        hours = int(s[-4:-2])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = (s[-6] == "+") and 1 or -1
        hours = int(s[-5:-3])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    return None, s

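# Examples (offsets follow the "unixtime = localunixtime + offset" rule used
# by strdate() below, so a UTC+2 suffix yields a negative offset):
#
#   parsetimezone('13:00:00 GMT')               # -> (0, '13:00:00')
#   parsetimezone('2006-02-06 13:00:00 +0200')  # -> (-7200, '2006-02-06 13:00:00')
#   parsetimezone('2006-02-06 13:00')           # -> (None, '2006-02-06 13:00')
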
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

def stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            regex = remod.compile(pattern)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]
    return 'literal', pattern, pattern.__eq__

2028 def shortuser(user):
2028 def shortuser(user):
2029 """Return a short representation of a user name or email address."""
2029 """Return a short representation of a user name or email address."""
2030 f = user.find('@')
2030 f = user.find('@')
2031 if f >= 0:
2031 if f >= 0:
2032 user = user[:f]
2032 user = user[:f]
2033 f = user.find('<')
2033 f = user.find('<')
2034 if f >= 0:
2034 if f >= 0:
2035 user = user[f + 1:]
2035 user = user[f + 1:]
2036 f = user.find(' ')
2036 f = user.find(' ')
2037 if f >= 0:
2037 if f >= 0:
2038 user = user[:f]
2038 user = user[:f]
2039 f = user.find('.')
2039 f = user.find('.')
2040 if f >= 0:
2040 if f >= 0:
2041 user = user[:f]
2041 user = user[:f]
2042 return user
2042 return user
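A doctest-style sketch of what shortuser() produces (illustrative only; the address below is a made-up example, not part of this changeset):

    >>> shortuser('Joe Bloggs <joe.bloggs@example.com>')
    'joe'
    >>> shortuser('joe.bloggs@example.com')
    'joe'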
2043
2043
2044 def emailuser(user):
2044 def emailuser(user):
2045 """Return the user portion of an email address."""
2045 """Return the user portion of an email address."""
2046 f = user.find('@')
2046 f = user.find('@')
2047 if f >= 0:
2047 if f >= 0:
2048 user = user[:f]
2048 user = user[:f]
2049 f = user.find('<')
2049 f = user.find('<')
2050 if f >= 0:
2050 if f >= 0:
2051 user = user[f + 1:]
2051 user = user[f + 1:]
2052 return user
2052 return user
2053
2053
2054 def email(author):
2054 def email(author):
2055 '''get email of author.'''
2055 '''get email of author.'''
2056 r = author.find('>')
2056 r = author.find('>')
2057 if r == -1:
2057 if r == -1:
2058 r = None
2058 r = None
2059 return author[author.find('<') + 1:r]
2059 return author[author.find('<') + 1:r]
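For comparison, a sketch of email() next to emailuser() on the same made-up author string (illustrative only):

    >>> email('Joe Bloggs <joe.bloggs@example.com>')
    'joe.bloggs@example.com'
    >>> emailuser('Joe Bloggs <joe.bloggs@example.com>')
    'joe.bloggs'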
2060
2060
2061 def ellipsis(text, maxlength=400):
2061 def ellipsis(text, maxlength=400):
2062 """Trim string to at most maxlength (default: 400) columns in display."""
2062 """Trim string to at most maxlength (default: 400) columns in display."""
2063 return encoding.trim(text, maxlength, ellipsis='...')
2063 return encoding.trim(text, maxlength, ellipsis='...')
2064
2064
2065 def unitcountfn(*unittable):
2065 def unitcountfn(*unittable):
2066 '''return a function that renders a readable count of some quantity'''
2066 '''return a function that renders a readable count of some quantity'''
2067
2067
2068 def go(count):
2068 def go(count):
2069 for multiplier, divisor, format in unittable:
2069 for multiplier, divisor, format in unittable:
2070 if count >= divisor * multiplier:
2070 if count >= divisor * multiplier:
2071 return format % (count / float(divisor))
2071 return format % (count / float(divisor))
2072 return unittable[-1][2] % count
2072 return unittable[-1][2] % count
2073
2073
2074 return go
2074 return go
2075
2075
2076 bytecount = unitcountfn(
2076 bytecount = unitcountfn(
2077 (100, 1 << 30, _('%.0f GB')),
2077 (100, 1 << 30, _('%.0f GB')),
2078 (10, 1 << 30, _('%.1f GB')),
2078 (10, 1 << 30, _('%.1f GB')),
2079 (1, 1 << 30, _('%.2f GB')),
2079 (1, 1 << 30, _('%.2f GB')),
2080 (100, 1 << 20, _('%.0f MB')),
2080 (100, 1 << 20, _('%.0f MB')),
2081 (10, 1 << 20, _('%.1f MB')),
2081 (10, 1 << 20, _('%.1f MB')),
2082 (1, 1 << 20, _('%.2f MB')),
2082 (1, 1 << 20, _('%.2f MB')),
2083 (100, 1 << 10, _('%.0f KB')),
2083 (100, 1 << 10, _('%.0f KB')),
2084 (10, 1 << 10, _('%.1f KB')),
2084 (10, 1 << 10, _('%.1f KB')),
2085 (1, 1 << 10, _('%.2f KB')),
2085 (1, 1 << 10, _('%.2f KB')),
2086 (1, 1, _('%.0f bytes')),
2086 (1, 1, _('%.0f bytes')),
2087 )
2087 )
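A few doctest-style data points for bytecount(), assuming the table above and untranslated messages (illustrative only):

    >>> bytecount(10)
    '10 bytes'
    >>> bytecount(2252)
    '2.20 KB'
    >>> bytecount(100 * (1 << 20))
    '100 MB'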
2088
2088
2089 def uirepr(s):
2089 def uirepr(s):
2090 # Avoid double backslash in Windows path repr()
2090 # Avoid double backslash in Windows path repr()
2091 return repr(s).replace('\\\\', '\\')
2091 return repr(s).replace('\\\\', '\\')
2092
2092
2093 # delay import of textwrap
2093 # delay import of textwrap
2094 def MBTextWrapper(**kwargs):
2094 def MBTextWrapper(**kwargs):
2095 class tw(textwrap.TextWrapper):
2095 class tw(textwrap.TextWrapper):
2096 """
2096 """
2097 Extend TextWrapper for width-awareness.
2097 Extend TextWrapper for width-awareness.
2098
2098
2099 Neither the number of 'bytes' in any encoding nor the number of
2099 Neither the number of 'bytes' in any encoding nor the number of
2100 'characters' is appropriate for calculating the terminal columns of a given string.
2100 'characters' is appropriate for calculating the terminal columns of a given string.
2101
2101
2102 The original TextWrapper implementation uses the built-in 'len()' directly,
2102 The original TextWrapper implementation uses the built-in 'len()' directly,
2103 so overriding is needed to use the width information of each character.
2103 so overriding is needed to use the width information of each character.
2104
2104
2105 In addition, characters classified as 'ambiguous' width are
2105 In addition, characters classified as 'ambiguous' width are
2106 treated as wide in East Asian locales, but as narrow elsewhere.
2106 treated as wide in East Asian locales, but as narrow elsewhere.
2107
2107
2108 This requires a per-use decision to determine the width of such characters.
2108 This requires a per-use decision to determine the width of such characters.
2109 """
2109 """
2110 def _cutdown(self, ucstr, space_left):
2110 def _cutdown(self, ucstr, space_left):
2111 l = 0
2111 l = 0
2112 colwidth = encoding.ucolwidth
2112 colwidth = encoding.ucolwidth
2113 for i in xrange(len(ucstr)):
2113 for i in xrange(len(ucstr)):
2114 l += colwidth(ucstr[i])
2114 l += colwidth(ucstr[i])
2115 if space_left < l:
2115 if space_left < l:
2116 return (ucstr[:i], ucstr[i:])
2116 return (ucstr[:i], ucstr[i:])
2117 return ucstr, ''
2117 return ucstr, ''
2118
2118
2119 # overriding of base class
2119 # overriding of base class
2120 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
2120 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
2121 space_left = max(width - cur_len, 1)
2121 space_left = max(width - cur_len, 1)
2122
2122
2123 if self.break_long_words:
2123 if self.break_long_words:
2124 cut, res = self._cutdown(reversed_chunks[-1], space_left)
2124 cut, res = self._cutdown(reversed_chunks[-1], space_left)
2125 cur_line.append(cut)
2125 cur_line.append(cut)
2126 reversed_chunks[-1] = res
2126 reversed_chunks[-1] = res
2127 elif not cur_line:
2127 elif not cur_line:
2128 cur_line.append(reversed_chunks.pop())
2128 cur_line.append(reversed_chunks.pop())
2129
2129
2130 # this overriding code is imported from TextWrapper of Python 2.6
2130 # this overriding code is imported from TextWrapper of Python 2.6
2131 # to calculate columns of string by 'encoding.ucolwidth()'
2131 # to calculate columns of string by 'encoding.ucolwidth()'
2132 def _wrap_chunks(self, chunks):
2132 def _wrap_chunks(self, chunks):
2133 colwidth = encoding.ucolwidth
2133 colwidth = encoding.ucolwidth
2134
2134
2135 lines = []
2135 lines = []
2136 if self.width <= 0:
2136 if self.width <= 0:
2137 raise ValueError("invalid width %r (must be > 0)" % self.width)
2137 raise ValueError("invalid width %r (must be > 0)" % self.width)
2138
2138
2139 # Arrange in reverse order so items can be efficiently popped
2139 # Arrange in reverse order so items can be efficiently popped
2140 # from a stack of chunks.
2140 # from a stack of chunks.
2141 chunks.reverse()
2141 chunks.reverse()
2142
2142
2143 while chunks:
2143 while chunks:
2144
2144
2145 # Start the list of chunks that will make up the current line.
2145 # Start the list of chunks that will make up the current line.
2146 # cur_len is just the length of all the chunks in cur_line.
2146 # cur_len is just the length of all the chunks in cur_line.
2147 cur_line = []
2147 cur_line = []
2148 cur_len = 0
2148 cur_len = 0
2149
2149
2150 # Figure out which static string will prefix this line.
2150 # Figure out which static string will prefix this line.
2151 if lines:
2151 if lines:
2152 indent = self.subsequent_indent
2152 indent = self.subsequent_indent
2153 else:
2153 else:
2154 indent = self.initial_indent
2154 indent = self.initial_indent
2155
2155
2156 # Maximum width for this line.
2156 # Maximum width for this line.
2157 width = self.width - len(indent)
2157 width = self.width - len(indent)
2158
2158
2159 # First chunk on line is whitespace -- drop it, unless this
2159 # First chunk on line is whitespace -- drop it, unless this
2160 # is the very beginning of the text (i.e. no lines started yet).
2160 # is the very beginning of the text (i.e. no lines started yet).
2161 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
2161 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
2162 del chunks[-1]
2162 del chunks[-1]
2163
2163
2164 while chunks:
2164 while chunks:
2165 l = colwidth(chunks[-1])
2165 l = colwidth(chunks[-1])
2166
2166
2167 # Can at least squeeze this chunk onto the current line.
2167 # Can at least squeeze this chunk onto the current line.
2168 if cur_len + l <= width:
2168 if cur_len + l <= width:
2169 cur_line.append(chunks.pop())
2169 cur_line.append(chunks.pop())
2170 cur_len += l
2170 cur_len += l
2171
2171
2172 # Nope, this line is full.
2172 # Nope, this line is full.
2173 else:
2173 else:
2174 break
2174 break
2175
2175
2176 # The current line is full, and the next chunk is too big to
2176 # The current line is full, and the next chunk is too big to
2177 # fit on *any* line (not just this one).
2177 # fit on *any* line (not just this one).
2178 if chunks and colwidth(chunks[-1]) > width:
2178 if chunks and colwidth(chunks[-1]) > width:
2179 self._handle_long_word(chunks, cur_line, cur_len, width)
2179 self._handle_long_word(chunks, cur_line, cur_len, width)
2180
2180
2181 # If the last chunk on this line is all whitespace, drop it.
2181 # If the last chunk on this line is all whitespace, drop it.
2182 if (self.drop_whitespace and
2182 if (self.drop_whitespace and
2183 cur_line and cur_line[-1].strip() == ''):
2183 cur_line and cur_line[-1].strip() == ''):
2184 del cur_line[-1]
2184 del cur_line[-1]
2185
2185
2186 # Convert current line back to a string and store it in list
2186 # Convert current line back to a string and store it in list
2187 # of all lines (return value).
2187 # of all lines (return value).
2188 if cur_line:
2188 if cur_line:
2189 lines.append(indent + ''.join(cur_line))
2189 lines.append(indent + ''.join(cur_line))
2190
2190
2191 return lines
2191 return lines
2192
2192
2193 global MBTextWrapper
2193 global MBTextWrapper
2194 MBTextWrapper = tw
2194 MBTextWrapper = tw
2195 return tw(**kwargs)
2195 return tw(**kwargs)
2196
2196
2197 def wrap(line, width, initindent='', hangindent=''):
2197 def wrap(line, width, initindent='', hangindent=''):
2198 maxindent = max(len(hangindent), len(initindent))
2198 maxindent = max(len(hangindent), len(initindent))
2199 if width <= maxindent:
2199 if width <= maxindent:
2200 # adjust for weird terminal size
2200 # adjust for weird terminal size
2201 width = max(78, maxindent + 1)
2201 width = max(78, maxindent + 1)
2202 line = line.decode(encoding.encoding, encoding.encodingmode)
2202 line = line.decode(encoding.encoding, encoding.encodingmode)
2203 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
2203 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
2204 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
2204 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
2205 wrapper = MBTextWrapper(width=width,
2205 wrapper = MBTextWrapper(width=width,
2206 initial_indent=initindent,
2206 initial_indent=initindent,
2207 subsequent_indent=hangindent)
2207 subsequent_indent=hangindent)
2208 return wrapper.fill(line).encode(encoding.encoding)
2208 return wrapper.fill(line).encode(encoding.encoding)
2209
2209
2210 if (pyplatform.python_implementation() == 'CPython' and
2210 if (pyplatform.python_implementation() == 'CPython' and
2211 sys.version_info < (3, 0)):
2211 sys.version_info < (3, 0)):
2212 # There is an issue in CPython that some IO methods do not handle EINTR
2212 # There is an issue in CPython that some IO methods do not handle EINTR
2213 # correctly. The following table shows what CPython version (and functions)
2213 # correctly. The following table shows what CPython version (and functions)
2214 # are affected (buggy: has the EINTR bug, okay: otherwise):
2214 # are affected (buggy: has the EINTR bug, okay: otherwise):
2215 #
2215 #
2216 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2216 # | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
2217 # --------------------------------------------------
2217 # --------------------------------------------------
2218 # fp.__iter__ | buggy | buggy | okay
2218 # fp.__iter__ | buggy | buggy | okay
2219 # fp.read* | buggy | okay [1] | okay
2219 # fp.read* | buggy | okay [1] | okay
2220 #
2220 #
2221 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2221 # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
2222 #
2222 #
2223 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2223 # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
2224 # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
2224 # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
2225 #
2225 #
2226 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2226 # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
2227 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2227 # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
2228 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2228 # CPython 2, because CPython 2 maintains an internal readahead buffer for
2229 # fp.__iter__ but not other fp.read* methods.
2229 # fp.__iter__ but not other fp.read* methods.
2230 #
2230 #
2231 # On modern systems like Linux, the "read" syscall cannot be interrupted
2231 # On modern systems like Linux, the "read" syscall cannot be interrupted
2232 # when reading "fast" files like on-disk files. So the EINTR issue only
2232 # when reading "fast" files like on-disk files. So the EINTR issue only
2233 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2233 # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
2234 # files approximately as "fast" files and use the fast (unsafe) code path,
2234 # files approximately as "fast" files and use the fast (unsafe) code path,
2235 # to minimize the performance impact.
2235 # to minimize the performance impact.
2236 if sys.version_info >= (2, 7, 4):
2236 if sys.version_info >= (2, 7, 4):
2237 # fp.readline deals with EINTR correctly, use it as a workaround.
2237 # fp.readline deals with EINTR correctly, use it as a workaround.
2238 def _safeiterfile(fp):
2238 def _safeiterfile(fp):
2239 return iter(fp.readline, '')
2239 return iter(fp.readline, '')
2240 else:
2240 else:
2241 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2241 # fp.read* are broken too, manually deal with EINTR in a stupid way.
2242 # note: this may block longer than necessary because of bufsize.
2242 # note: this may block longer than necessary because of bufsize.
2243 def _safeiterfile(fp, bufsize=4096):
2243 def _safeiterfile(fp, bufsize=4096):
2244 fd = fp.fileno()
2244 fd = fp.fileno()
2245 line = ''
2245 line = ''
2246 while True:
2246 while True:
2247 try:
2247 try:
2248 buf = os.read(fd, bufsize)
2248 buf = os.read(fd, bufsize)
2249 except OSError as ex:
2249 except OSError as ex:
2250 # os.read only raises EINTR before any data is read
2250 # os.read only raises EINTR before any data is read
2251 if ex.errno == errno.EINTR:
2251 if ex.errno == errno.EINTR:
2252 continue
2252 continue
2253 else:
2253 else:
2254 raise
2254 raise
2255 line += buf
2255 line += buf
2256 if '\n' in buf:
2256 if '\n' in buf:
2257 splitted = line.splitlines(True)
2257 splitted = line.splitlines(True)
2258 line = ''
2258 line = ''
2259 for l in splitted:
2259 for l in splitted:
2260 if l[-1] == '\n':
2260 if l[-1] == '\n':
2261 yield l
2261 yield l
2262 else:
2262 else:
2263 line = l
2263 line = l
2264 if not buf:
2264 if not buf:
2265 break
2265 break
2266 if line:
2266 if line:
2267 yield line
2267 yield line
2268
2268
2269 def iterfile(fp):
2269 def iterfile(fp):
2270 fastpath = True
2270 fastpath = True
2271 if type(fp) is file:
2271 if type(fp) is file:
2272 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2272 fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
2273 if fastpath:
2273 if fastpath:
2274 return fp
2274 return fp
2275 else:
2275 else:
2276 return _safeiterfile(fp)
2276 return _safeiterfile(fp)
2277 else:
2277 else:
2278 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2278 # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
2279 def iterfile(fp):
2279 def iterfile(fp):
2280 return fp
2280 return fp
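A brief usage sketch for either definition of iterfile() above (illustrative; handle() stands for any hypothetical per-line callback):

    for line in iterfile(fp):   # safe against EINTR on Python 2 pipes/sockets
        handle(line)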
2281
2281
2282 def iterlines(iterator):
2282 def iterlines(iterator):
2283 for chunk in iterator:
2283 for chunk in iterator:
2284 for line in chunk.splitlines():
2284 for line in chunk.splitlines():
2285 yield line
2285 yield line
2286
2286
2287 def expandpath(path):
2287 def expandpath(path):
2288 return os.path.expanduser(os.path.expandvars(path))
2288 return os.path.expanduser(os.path.expandvars(path))
2289
2289
2290 def hgcmd():
2290 def hgcmd():
2291 """Return the command used to execute current hg
2291 """Return the command used to execute current hg
2292
2292
2293 This is different from hgexecutable() because on Windows we want
2293 This is different from hgexecutable() because on Windows we want
2294 to avoid things that open new shell windows, such as batch files, so we
2294 to avoid things that open new shell windows, such as batch files, so we
2295 return either the python invocation or the current executable.
2295 return either the python invocation or the current executable.
2296 """
2296 """
2297 if mainfrozen():
2297 if mainfrozen():
2298 if getattr(sys, 'frozen', None) == 'macosx_app':
2298 if getattr(sys, 'frozen', None) == 'macosx_app':
2299 # Env variable set by py2app
2299 # Env variable set by py2app
2300 return [encoding.environ['EXECUTABLEPATH']]
2300 return [encoding.environ['EXECUTABLEPATH']]
2301 else:
2301 else:
2302 return [sys.executable]
2302 return [pycompat.sysexecutable]
2303 return gethgcmd()
2303 return gethgcmd()
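For context on the sys.executable -> pycompat.sysexecutable change above: a rough sketch of what pycompat.sysexecutable is assumed to provide, namely a bytes path on Python 3 (this is an assumption for illustration, not the changeset's actual definition):

    # assumed rough equivalent inside pycompat:
    # if sys.version_info[0] >= 3:
    #     sysexecutable = os.fsencode(sys.executable) if sys.executable else ''
    # else:
    #     sysexecutable = sys.executable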
2304
2304
2305 def rundetached(args, condfn):
2305 def rundetached(args, condfn):
2306 """Execute the argument list in a detached process.
2306 """Execute the argument list in a detached process.
2307
2307
2308 condfn is a callable which is called repeatedly and should return
2308 condfn is a callable which is called repeatedly and should return
2309 True once the child process is known to have started successfully.
2309 True once the child process is known to have started successfully.
2310 At this point, the child process PID is returned. If the child
2310 At this point, the child process PID is returned. If the child
2311 process fails to start or finishes before condfn() evaluates to
2311 process fails to start or finishes before condfn() evaluates to
2312 True, return -1.
2312 True, return -1.
2313 """
2313 """
2314 # Windows case is easier because the child process is either
2314 # Windows case is easier because the child process is either
2315 # successfully starting and validating the condition or exiting
2315 # successfully starting and validating the condition or exiting
2316 # on failure. We just poll on its PID. On Unix, if the child
2316 # on failure. We just poll on its PID. On Unix, if the child
2317 # process fails to start, it will be left in a zombie state until
2317 # process fails to start, it will be left in a zombie state until
2318 # the parent waits on it, which we cannot do since we expect a long
2318 # the parent waits on it, which we cannot do since we expect a long
2319 # running process on success. Instead we listen for SIGCHLD telling
2319 # running process on success. Instead we listen for SIGCHLD telling
2320 # us our child process terminated.
2320 # us our child process terminated.
2321 terminated = set()
2321 terminated = set()
2322 def handler(signum, frame):
2322 def handler(signum, frame):
2323 terminated.add(os.wait())
2323 terminated.add(os.wait())
2324 prevhandler = None
2324 prevhandler = None
2325 SIGCHLD = getattr(signal, 'SIGCHLD', None)
2325 SIGCHLD = getattr(signal, 'SIGCHLD', None)
2326 if SIGCHLD is not None:
2326 if SIGCHLD is not None:
2327 prevhandler = signal.signal(SIGCHLD, handler)
2327 prevhandler = signal.signal(SIGCHLD, handler)
2328 try:
2328 try:
2329 pid = spawndetached(args)
2329 pid = spawndetached(args)
2330 while not condfn():
2330 while not condfn():
2331 if ((pid in terminated or not testpid(pid))
2331 if ((pid in terminated or not testpid(pid))
2332 and not condfn()):
2332 and not condfn()):
2333 return -1
2333 return -1
2334 time.sleep(0.1)
2334 time.sleep(0.1)
2335 return pid
2335 return pid
2336 finally:
2336 finally:
2337 if prevhandler is not None:
2337 if prevhandler is not None:
2338 signal.signal(signal.SIGCHLD, prevhandler)
2338 signal.signal(signal.SIGCHLD, prevhandler)
2339
2339
2340 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2340 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
2341 """Return the result of interpolating items in the mapping into string s.
2341 """Return the result of interpolating items in the mapping into string s.
2342
2342
2343 prefix is a single character string, or a two character string with
2343 prefix is a single character string, or a two character string with
2344 a backslash as the first character if the prefix needs to be escaped in
2344 a backslash as the first character if the prefix needs to be escaped in
2345 a regular expression.
2345 a regular expression.
2346
2346
2347 fn is an optional function that will be applied to the replacement text
2347 fn is an optional function that will be applied to the replacement text
2348 just before replacement.
2348 just before replacement.
2349
2349
2350 escape_prefix is an optional flag that allows a doubled prefix to be
2350 escape_prefix is an optional flag that allows a doubled prefix to be
2351 used as an escape for the prefix character itself.
2351 used as an escape for the prefix character itself.
2352 """
2352 """
2353 fn = fn or (lambda s: s)
2353 fn = fn or (lambda s: s)
2354 patterns = '|'.join(mapping.keys())
2354 patterns = '|'.join(mapping.keys())
2355 if escape_prefix:
2355 if escape_prefix:
2356 patterns += '|' + prefix
2356 patterns += '|' + prefix
2357 if len(prefix) > 1:
2357 if len(prefix) > 1:
2358 prefix_char = prefix[1:]
2358 prefix_char = prefix[1:]
2359 else:
2359 else:
2360 prefix_char = prefix
2360 prefix_char = prefix
2361 mapping[prefix_char] = prefix_char
2361 mapping[prefix_char] = prefix_char
2362 r = remod.compile(r'%s(%s)' % (prefix, patterns))
2362 r = remod.compile(r'%s(%s)' % (prefix, patterns))
2363 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2363 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
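A doctest-style sketch of interpolate() with a '%' prefix (the mapping is illustrative, not from the source):

    >>> interpolate('%', {'foo': 'bar'}, 'say %foo')
    'say bar'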
2364
2364
2365 def getport(port):
2365 def getport(port):
2366 """Return the port for a given network service.
2366 """Return the port for a given network service.
2367
2367
2368 If port is an integer, it's returned as is. If it's a string, it's
2368 If port is an integer, it's returned as is. If it's a string, it's
2369 looked up using socket.getservbyname(). If there's no matching
2369 looked up using socket.getservbyname(). If there's no matching
2370 service, error.Abort is raised.
2370 service, error.Abort is raised.
2371 """
2371 """
2372 try:
2372 try:
2373 return int(port)
2373 return int(port)
2374 except ValueError:
2374 except ValueError:
2375 pass
2375 pass
2376
2376
2377 try:
2377 try:
2378 return socket.getservbyname(port)
2378 return socket.getservbyname(port)
2379 except socket.error:
2379 except socket.error:
2380 raise Abort(_("no port number associated with service '%s'") % port)
2380 raise Abort(_("no port number associated with service '%s'") % port)
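A doctest-style sketch of getport(); the service-name lookup depends on the local services database, so only the integer cases are shown (illustrative only):

    >>> getport(8080)
    8080
    >>> getport('8080')
    8080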
2381
2381
2382 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
2382 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
2383 '0': False, 'no': False, 'false': False, 'off': False,
2383 '0': False, 'no': False, 'false': False, 'off': False,
2384 'never': False}
2384 'never': False}
2385
2385
2386 def parsebool(s):
2386 def parsebool(s):
2387 """Parse s into a boolean.
2387 """Parse s into a boolean.
2388
2388
2389 If s is not a valid boolean, returns None.
2389 If s is not a valid boolean, returns None.
2390 """
2390 """
2391 return _booleans.get(s.lower(), None)
2391 return _booleans.get(s.lower(), None)
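A few doctest-style data points for parsebool(), following the _booleans table above (illustrative only):

    >>> parsebool('on')
    True
    >>> parsebool('Never')
    False
    >>> parsebool('maybe') is None
    True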
2392
2392
2393 _hextochr = dict((a + b, chr(int(a + b, 16)))
2393 _hextochr = dict((a + b, chr(int(a + b, 16)))
2394 for a in string.hexdigits for b in string.hexdigits)
2394 for a in string.hexdigits for b in string.hexdigits)
2395
2395
2396 class url(object):
2396 class url(object):
2397 r"""Reliable URL parser.
2397 r"""Reliable URL parser.
2398
2398
2399 This parses URLs and provides attributes for the following
2399 This parses URLs and provides attributes for the following
2400 components:
2400 components:
2401
2401
2402 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2402 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2403
2403
2404 Missing components are set to None. The only exception is
2404 Missing components are set to None. The only exception is
2405 fragment, which is set to '' if present but empty.
2405 fragment, which is set to '' if present but empty.
2406
2406
2407 If parsefragment is False, fragment is included in query. If
2407 If parsefragment is False, fragment is included in query. If
2408 parsequery is False, query is included in path. If both are
2408 parsequery is False, query is included in path. If both are
2409 False, both fragment and query are included in path.
2409 False, both fragment and query are included in path.
2410
2410
2411 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2411 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2412
2412
2413 Note that for backward compatibility reasons, bundle URLs do not
2413 Note that for backward compatibility reasons, bundle URLs do not
2414 take host names. That means 'bundle://../' has a path of '../'.
2414 take host names. That means 'bundle://../' has a path of '../'.
2415
2415
2416 Examples:
2416 Examples:
2417
2417
2418 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2418 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2419 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2419 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2420 >>> url('ssh://[::1]:2200//home/joe/repo')
2420 >>> url('ssh://[::1]:2200//home/joe/repo')
2421 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2421 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2422 >>> url('file:///home/joe/repo')
2422 >>> url('file:///home/joe/repo')
2423 <url scheme: 'file', path: '/home/joe/repo'>
2423 <url scheme: 'file', path: '/home/joe/repo'>
2424 >>> url('file:///c:/temp/foo/')
2424 >>> url('file:///c:/temp/foo/')
2425 <url scheme: 'file', path: 'c:/temp/foo/'>
2425 <url scheme: 'file', path: 'c:/temp/foo/'>
2426 >>> url('bundle:foo')
2426 >>> url('bundle:foo')
2427 <url scheme: 'bundle', path: 'foo'>
2427 <url scheme: 'bundle', path: 'foo'>
2428 >>> url('bundle://../foo')
2428 >>> url('bundle://../foo')
2429 <url scheme: 'bundle', path: '../foo'>
2429 <url scheme: 'bundle', path: '../foo'>
2430 >>> url(r'c:\foo\bar')
2430 >>> url(r'c:\foo\bar')
2431 <url path: 'c:\\foo\\bar'>
2431 <url path: 'c:\\foo\\bar'>
2432 >>> url(r'\\blah\blah\blah')
2432 >>> url(r'\\blah\blah\blah')
2433 <url path: '\\\\blah\\blah\\blah'>
2433 <url path: '\\\\blah\\blah\\blah'>
2434 >>> url(r'\\blah\blah\blah#baz')
2434 >>> url(r'\\blah\blah\blah#baz')
2435 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2435 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2436 >>> url(r'file:///C:\users\me')
2436 >>> url(r'file:///C:\users\me')
2437 <url scheme: 'file', path: 'C:\\users\\me'>
2437 <url scheme: 'file', path: 'C:\\users\\me'>
2438
2438
2439 Authentication credentials:
2439 Authentication credentials:
2440
2440
2441 >>> url('ssh://joe:xyz@x/repo')
2441 >>> url('ssh://joe:xyz@x/repo')
2442 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2442 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2443 >>> url('ssh://joe@x/repo')
2443 >>> url('ssh://joe@x/repo')
2444 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2444 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2445
2445
2446 Query strings and fragments:
2446 Query strings and fragments:
2447
2447
2448 >>> url('http://host/a?b#c')
2448 >>> url('http://host/a?b#c')
2449 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2449 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2450 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2450 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2451 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2451 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2452
2452
2453 Empty path:
2453 Empty path:
2454
2454
2455 >>> url('')
2455 >>> url('')
2456 <url path: ''>
2456 <url path: ''>
2457 >>> url('#a')
2457 >>> url('#a')
2458 <url path: '', fragment: 'a'>
2458 <url path: '', fragment: 'a'>
2459 >>> url('http://host/')
2459 >>> url('http://host/')
2460 <url scheme: 'http', host: 'host', path: ''>
2460 <url scheme: 'http', host: 'host', path: ''>
2461 >>> url('http://host/#a')
2461 >>> url('http://host/#a')
2462 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2462 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2463
2463
2464 Only scheme:
2464 Only scheme:
2465
2465
2466 >>> url('http:')
2466 >>> url('http:')
2467 <url scheme: 'http'>
2467 <url scheme: 'http'>
2468 """
2468 """
2469
2469
2470 _safechars = "!~*'()+"
2470 _safechars = "!~*'()+"
2471 _safepchars = "/!~*'()+:\\"
2471 _safepchars = "/!~*'()+:\\"
2472 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2472 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2473
2473
2474 def __init__(self, path, parsequery=True, parsefragment=True):
2474 def __init__(self, path, parsequery=True, parsefragment=True):
2475 # We slowly chomp away at path until we have only the path left
2475 # We slowly chomp away at path until we have only the path left
2476 self.scheme = self.user = self.passwd = self.host = None
2476 self.scheme = self.user = self.passwd = self.host = None
2477 self.port = self.path = self.query = self.fragment = None
2477 self.port = self.path = self.query = self.fragment = None
2478 self._localpath = True
2478 self._localpath = True
2479 self._hostport = ''
2479 self._hostport = ''
2480 self._origpath = path
2480 self._origpath = path
2481
2481
2482 if parsefragment and '#' in path:
2482 if parsefragment and '#' in path:
2483 path, self.fragment = path.split('#', 1)
2483 path, self.fragment = path.split('#', 1)
2484
2484
2485 # special case for Windows drive letters and UNC paths
2485 # special case for Windows drive letters and UNC paths
2486 if hasdriveletter(path) or path.startswith('\\\\'):
2486 if hasdriveletter(path) or path.startswith('\\\\'):
2487 self.path = path
2487 self.path = path
2488 return
2488 return
2489
2489
2490 # For compatibility reasons, we can't handle bundle paths as
2490 # For compatibility reasons, we can't handle bundle paths as
2491 # normal URLs
2491 # normal URLs
2492 if path.startswith('bundle:'):
2492 if path.startswith('bundle:'):
2493 self.scheme = 'bundle'
2493 self.scheme = 'bundle'
2494 path = path[7:]
2494 path = path[7:]
2495 if path.startswith('//'):
2495 if path.startswith('//'):
2496 path = path[2:]
2496 path = path[2:]
2497 self.path = path
2497 self.path = path
2498 return
2498 return
2499
2499
2500 if self._matchscheme(path):
2500 if self._matchscheme(path):
2501 parts = path.split(':', 1)
2501 parts = path.split(':', 1)
2502 if parts[0]:
2502 if parts[0]:
2503 self.scheme, path = parts
2503 self.scheme, path = parts
2504 self._localpath = False
2504 self._localpath = False
2505
2505
2506 if not path:
2506 if not path:
2507 path = None
2507 path = None
2508 if self._localpath:
2508 if self._localpath:
2509 self.path = ''
2509 self.path = ''
2510 return
2510 return
2511 else:
2511 else:
2512 if self._localpath:
2512 if self._localpath:
2513 self.path = path
2513 self.path = path
2514 return
2514 return
2515
2515
2516 if parsequery and '?' in path:
2516 if parsequery and '?' in path:
2517 path, self.query = path.split('?', 1)
2517 path, self.query = path.split('?', 1)
2518 if not path:
2518 if not path:
2519 path = None
2519 path = None
2520 if not self.query:
2520 if not self.query:
2521 self.query = None
2521 self.query = None
2522
2522
2523 # // is required to specify a host/authority
2523 # // is required to specify a host/authority
2524 if path and path.startswith('//'):
2524 if path and path.startswith('//'):
2525 parts = path[2:].split('/', 1)
2525 parts = path[2:].split('/', 1)
2526 if len(parts) > 1:
2526 if len(parts) > 1:
2527 self.host, path = parts
2527 self.host, path = parts
2528 else:
2528 else:
2529 self.host = parts[0]
2529 self.host = parts[0]
2530 path = None
2530 path = None
2531 if not self.host:
2531 if not self.host:
2532 self.host = None
2532 self.host = None
2533 # path of file:///d is /d
2533 # path of file:///d is /d
2534 # path of file:///d:/ is d:/, not /d:/
2534 # path of file:///d:/ is d:/, not /d:/
2535 if path and not hasdriveletter(path):
2535 if path and not hasdriveletter(path):
2536 path = '/' + path
2536 path = '/' + path
2537
2537
2538 if self.host and '@' in self.host:
2538 if self.host and '@' in self.host:
2539 self.user, self.host = self.host.rsplit('@', 1)
2539 self.user, self.host = self.host.rsplit('@', 1)
2540 if ':' in self.user:
2540 if ':' in self.user:
2541 self.user, self.passwd = self.user.split(':', 1)
2541 self.user, self.passwd = self.user.split(':', 1)
2542 if not self.host:
2542 if not self.host:
2543 self.host = None
2543 self.host = None
2544
2544
2545 # Don't split on colons in IPv6 addresses without ports
2545 # Don't split on colons in IPv6 addresses without ports
2546 if (self.host and ':' in self.host and
2546 if (self.host and ':' in self.host and
2547 not (self.host.startswith('[') and self.host.endswith(']'))):
2547 not (self.host.startswith('[') and self.host.endswith(']'))):
2548 self._hostport = self.host
2548 self._hostport = self.host
2549 self.host, self.port = self.host.rsplit(':', 1)
2549 self.host, self.port = self.host.rsplit(':', 1)
2550 if not self.host:
2550 if not self.host:
2551 self.host = None
2551 self.host = None
2552
2552
2553 if (self.host and self.scheme == 'file' and
2553 if (self.host and self.scheme == 'file' and
2554 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2554 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2555 raise Abort(_('file:// URLs can only refer to localhost'))
2555 raise Abort(_('file:// URLs can only refer to localhost'))
2556
2556
2557 self.path = path
2557 self.path = path
2558
2558
2559 # leave the query string escaped
2559 # leave the query string escaped
2560 for a in ('user', 'passwd', 'host', 'port',
2560 for a in ('user', 'passwd', 'host', 'port',
2561 'path', 'fragment'):
2561 'path', 'fragment'):
2562 v = getattr(self, a)
2562 v = getattr(self, a)
2563 if v is not None:
2563 if v is not None:
2564 setattr(self, a, pycompat.urlunquote(v))
2564 setattr(self, a, pycompat.urlunquote(v))
2565
2565
2566 def __repr__(self):
2566 def __repr__(self):
2567 attrs = []
2567 attrs = []
2568 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2568 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2569 'query', 'fragment'):
2569 'query', 'fragment'):
2570 v = getattr(self, a)
2570 v = getattr(self, a)
2571 if v is not None:
2571 if v is not None:
2572 attrs.append('%s: %r' % (a, v))
2572 attrs.append('%s: %r' % (a, v))
2573 return '<url %s>' % ', '.join(attrs)
2573 return '<url %s>' % ', '.join(attrs)
2574
2574
2575 def __str__(self):
2575 def __str__(self):
2576 r"""Join the URL's components back into a URL string.
2576 r"""Join the URL's components back into a URL string.
2577
2577
2578 Examples:
2578 Examples:
2579
2579
2580 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2580 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2581 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2581 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2582 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2582 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2583 'http://user:pw@host:80/?foo=bar&baz=42'
2583 'http://user:pw@host:80/?foo=bar&baz=42'
2584 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2584 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2585 'http://user:pw@host:80/?foo=bar%3dbaz'
2585 'http://user:pw@host:80/?foo=bar%3dbaz'
2586 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2586 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2587 'ssh://user:pw@[::1]:2200//home/joe#'
2587 'ssh://user:pw@[::1]:2200//home/joe#'
2588 >>> str(url('http://localhost:80//'))
2588 >>> str(url('http://localhost:80//'))
2589 'http://localhost:80//'
2589 'http://localhost:80//'
2590 >>> str(url('http://localhost:80/'))
2590 >>> str(url('http://localhost:80/'))
2591 'http://localhost:80/'
2591 'http://localhost:80/'
2592 >>> str(url('http://localhost:80'))
2592 >>> str(url('http://localhost:80'))
2593 'http://localhost:80/'
2593 'http://localhost:80/'
2594 >>> str(url('bundle:foo'))
2594 >>> str(url('bundle:foo'))
2595 'bundle:foo'
2595 'bundle:foo'
2596 >>> str(url('bundle://../foo'))
2596 >>> str(url('bundle://../foo'))
2597 'bundle:../foo'
2597 'bundle:../foo'
2598 >>> str(url('path'))
2598 >>> str(url('path'))
2599 'path'
2599 'path'
2600 >>> str(url('file:///tmp/foo/bar'))
2600 >>> str(url('file:///tmp/foo/bar'))
2601 'file:///tmp/foo/bar'
2601 'file:///tmp/foo/bar'
2602 >>> str(url('file:///c:/tmp/foo/bar'))
2602 >>> str(url('file:///c:/tmp/foo/bar'))
2603 'file:///c:/tmp/foo/bar'
2603 'file:///c:/tmp/foo/bar'
2604 >>> print url(r'bundle:foo\bar')
2604 >>> print url(r'bundle:foo\bar')
2605 bundle:foo\bar
2605 bundle:foo\bar
2606 >>> print url(r'file:///D:\data\hg')
2606 >>> print url(r'file:///D:\data\hg')
2607 file:///D:\data\hg
2607 file:///D:\data\hg
2608 """
2608 """
2609 if self._localpath:
2609 if self._localpath:
2610 s = self.path
2610 s = self.path
2611 if self.scheme == 'bundle':
2611 if self.scheme == 'bundle':
2612 s = 'bundle:' + s
2612 s = 'bundle:' + s
2613 if self.fragment:
2613 if self.fragment:
2614 s += '#' + self.fragment
2614 s += '#' + self.fragment
2615 return s
2615 return s
2616
2616
2617 s = self.scheme + ':'
2617 s = self.scheme + ':'
2618 if self.user or self.passwd or self.host:
2618 if self.user or self.passwd or self.host:
2619 s += '//'
2619 s += '//'
2620 elif self.scheme and (not self.path or self.path.startswith('/')
2620 elif self.scheme and (not self.path or self.path.startswith('/')
2621 or hasdriveletter(self.path)):
2621 or hasdriveletter(self.path)):
2622 s += '//'
2622 s += '//'
2623 if hasdriveletter(self.path):
2623 if hasdriveletter(self.path):
2624 s += '/'
2624 s += '/'
2625 if self.user:
2625 if self.user:
2626 s += urlreq.quote(self.user, safe=self._safechars)
2626 s += urlreq.quote(self.user, safe=self._safechars)
2627 if self.passwd:
2627 if self.passwd:
2628 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2628 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2629 if self.user or self.passwd:
2629 if self.user or self.passwd:
2630 s += '@'
2630 s += '@'
2631 if self.host:
2631 if self.host:
2632 if not (self.host.startswith('[') and self.host.endswith(']')):
2632 if not (self.host.startswith('[') and self.host.endswith(']')):
2633 s += urlreq.quote(self.host)
2633 s += urlreq.quote(self.host)
2634 else:
2634 else:
2635 s += self.host
2635 s += self.host
2636 if self.port:
2636 if self.port:
2637 s += ':' + urlreq.quote(self.port)
2637 s += ':' + urlreq.quote(self.port)
2638 if self.host:
2638 if self.host:
2639 s += '/'
2639 s += '/'
2640 if self.path:
2640 if self.path:
2641 # TODO: similar to the query string, we should not unescape the
2641 # TODO: similar to the query string, we should not unescape the
2642 # path when we store it, the path might contain '%2f' = '/',
2642 # path when we store it, the path might contain '%2f' = '/',
2643 # which we should *not* escape.
2643 # which we should *not* escape.
2644 s += urlreq.quote(self.path, safe=self._safepchars)
2644 s += urlreq.quote(self.path, safe=self._safepchars)
2645 if self.query:
2645 if self.query:
2646 # we store the query in escaped form.
2646 # we store the query in escaped form.
2647 s += '?' + self.query
2647 s += '?' + self.query
2648 if self.fragment is not None:
2648 if self.fragment is not None:
2649 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2649 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2650 return s
2650 return s
2651
2651
2652 def authinfo(self):
2652 def authinfo(self):
2653 user, passwd = self.user, self.passwd
2653 user, passwd = self.user, self.passwd
2654 try:
2654 try:
2655 self.user, self.passwd = None, None
2655 self.user, self.passwd = None, None
2656 s = str(self)
2656 s = str(self)
2657 finally:
2657 finally:
2658 self.user, self.passwd = user, passwd
2658 self.user, self.passwd = user, passwd
2659 if not self.user:
2659 if not self.user:
2660 return (s, None)
2660 return (s, None)
2661 # authinfo[1] is passed to urllib2 password manager, and its
2661 # authinfo[1] is passed to urllib2 password manager, and its
2662 # URIs must not contain credentials. The host is passed in the
2662 # URIs must not contain credentials. The host is passed in the
2663 # URIs list because Python < 2.4.3 uses only that to search for
2663 # URIs list because Python < 2.4.3 uses only that to search for
2664 # a password.
2664 # a password.
2665 return (s, (None, (s, self.host),
2665 return (s, (None, (s, self.host),
2666 self.user, self.passwd or ''))
2666 self.user, self.passwd or ''))
2667
2667
2668 def isabs(self):
2668 def isabs(self):
2669 if self.scheme and self.scheme != 'file':
2669 if self.scheme and self.scheme != 'file':
2670 return True # remote URL
2670 return True # remote URL
2671 if hasdriveletter(self.path):
2671 if hasdriveletter(self.path):
2672 return True # absolute for our purposes - can't be joined()
2672 return True # absolute for our purposes - can't be joined()
2673 if self.path.startswith(r'\\'):
2673 if self.path.startswith(r'\\'):
2674 return True # Windows UNC path
2674 return True # Windows UNC path
2675 if self.path.startswith('/'):
2675 if self.path.startswith('/'):
2676 return True # POSIX-style
2676 return True # POSIX-style
2677 return False
2677 return False
2678
2678
2679 def localpath(self):
2679 def localpath(self):
2680 if self.scheme == 'file' or self.scheme == 'bundle':
2680 if self.scheme == 'file' or self.scheme == 'bundle':
2681 path = self.path or '/'
2681 path = self.path or '/'
2682 # For Windows, we need to promote hosts containing drive
2682 # For Windows, we need to promote hosts containing drive
2683 # letters to paths with drive letters.
2683 # letters to paths with drive letters.
2684 if hasdriveletter(self._hostport):
2684 if hasdriveletter(self._hostport):
2685 path = self._hostport + '/' + self.path
2685 path = self._hostport + '/' + self.path
2686 elif (self.host is not None and self.path
2686 elif (self.host is not None and self.path
2687 and not hasdriveletter(path)):
2687 and not hasdriveletter(path)):
2688 path = '/' + path
2688 path = '/' + path
2689 return path
2689 return path
2690 return self._origpath
2690 return self._origpath
2691
2691
2692 def islocal(self):
2692 def islocal(self):
2693 '''whether localpath will return something that posixfile can open'''
2693 '''whether localpath will return something that posixfile can open'''
2694 return (not self.scheme or self.scheme == 'file'
2694 return (not self.scheme or self.scheme == 'file'
2695 or self.scheme == 'bundle')
2695 or self.scheme == 'bundle')
2696
2696
2697 def hasscheme(path):
2697 def hasscheme(path):
2698 return bool(url(path).scheme)
2698 return bool(url(path).scheme)
2699
2699
2700 def hasdriveletter(path):
2700 def hasdriveletter(path):
2701 return path and path[1:2] == ':' and path[0:1].isalpha()
2701 return path and path[1:2] == ':' and path[0:1].isalpha()
2702
2702
2703 def urllocalpath(path):
2703 def urllocalpath(path):
2704 return url(path, parsequery=False, parsefragment=False).localpath()
2704 return url(path, parsequery=False, parsefragment=False).localpath()
2705
2705
2706 def hidepassword(u):
2706 def hidepassword(u):
2707 '''hide user credential in a url string'''
2707 '''hide user credential in a url string'''
2708 u = url(u)
2708 u = url(u)
2709 if u.passwd:
2709 if u.passwd:
2710 u.passwd = '***'
2710 u.passwd = '***'
2711 return str(u)
2711 return str(u)
2712
2712
2713 def removeauth(u):
2713 def removeauth(u):
2714 '''remove all authentication information from a url string'''
2714 '''remove all authentication information from a url string'''
2715 u = url(u)
2715 u = url(u)
2716 u.user = u.passwd = None
2716 u.user = u.passwd = None
2717 return str(u)
2717 return str(u)
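Doctest-style sketches of the two helpers above on a made-up URL (illustrative only):

    >>> hidepassword('http://joe:secret@example.com/repo')
    'http://joe:***@example.com/repo'
    >>> removeauth('http://joe:secret@example.com/repo')
    'http://example.com/repo'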
2718
2718
2719 def isatty(fp):
2719 def isatty(fp):
2720 try:
2720 try:
2721 return fp.isatty()
2721 return fp.isatty()
2722 except AttributeError:
2722 except AttributeError:
2723 return False
2723 return False
2724
2724
2725 timecount = unitcountfn(
2725 timecount = unitcountfn(
2726 (1, 1e3, _('%.0f s')),
2726 (1, 1e3, _('%.0f s')),
2727 (100, 1, _('%.1f s')),
2727 (100, 1, _('%.1f s')),
2728 (10, 1, _('%.2f s')),
2728 (10, 1, _('%.2f s')),
2729 (1, 1, _('%.3f s')),
2729 (1, 1, _('%.3f s')),
2730 (100, 0.001, _('%.1f ms')),
2730 (100, 0.001, _('%.1f ms')),
2731 (10, 0.001, _('%.2f ms')),
2731 (10, 0.001, _('%.2f ms')),
2732 (1, 0.001, _('%.3f ms')),
2732 (1, 0.001, _('%.3f ms')),
2733 (100, 0.000001, _('%.1f us')),
2733 (100, 0.000001, _('%.1f us')),
2734 (10, 0.000001, _('%.2f us')),
2734 (10, 0.000001, _('%.2f us')),
2735 (1, 0.000001, _('%.3f us')),
2735 (1, 0.000001, _('%.3f us')),
2736 (100, 0.000000001, _('%.1f ns')),
2736 (100, 0.000000001, _('%.1f ns')),
2737 (10, 0.000000001, _('%.2f ns')),
2737 (10, 0.000000001, _('%.2f ns')),
2738 (1, 0.000000001, _('%.3f ns')),
2738 (1, 0.000000001, _('%.3f ns')),
2739 )
2739 )
2740
2740
2741 _timenesting = [0]
2741 _timenesting = [0]
2742
2742
2743 def timed(func):
2743 def timed(func):
2744 '''Report the execution time of a function call to stderr.
2744 '''Report the execution time of a function call to stderr.
2745
2745
2746 During development, use as a decorator when you need to measure
2746 During development, use as a decorator when you need to measure
2747 the cost of a function, e.g. as follows:
2747 the cost of a function, e.g. as follows:
2748
2748
2749 @util.timed
2749 @util.timed
2750 def foo(a, b, c):
2750 def foo(a, b, c):
2751 pass
2751 pass
2752 '''
2752 '''
2753
2753
2754 def wrapper(*args, **kwargs):
2754 def wrapper(*args, **kwargs):
2755 start = time.time()
2755 start = time.time()
2756 indent = 2
2756 indent = 2
2757 _timenesting[0] += indent
2757 _timenesting[0] += indent
2758 try:
2758 try:
2759 return func(*args, **kwargs)
2759 return func(*args, **kwargs)
2760 finally:
2760 finally:
2761 elapsed = time.time() - start
2761 elapsed = time.time() - start
2762 _timenesting[0] -= indent
2762 _timenesting[0] -= indent
2763 stderr.write('%s%s: %s\n' %
2763 stderr.write('%s%s: %s\n' %
2764 (' ' * _timenesting[0], func.__name__,
2764 (' ' * _timenesting[0], func.__name__,
2765 timecount(elapsed)))
2765 timecount(elapsed)))
2766 return wrapper
2766 return wrapper
2767
2767
2768 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2768 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2769 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2769 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2770
2770
2771 def sizetoint(s):
2771 def sizetoint(s):
2772 '''Convert a space specifier to a byte count.
2772 '''Convert a space specifier to a byte count.
2773
2773
2774 >>> sizetoint('30')
2774 >>> sizetoint('30')
2775 30
2775 30
2776 >>> sizetoint('2.2kb')
2776 >>> sizetoint('2.2kb')
2777 2252
2777 2252
2778 >>> sizetoint('6M')
2778 >>> sizetoint('6M')
2779 6291456
2779 6291456
2780 '''
2780 '''
2781 t = s.strip().lower()
2781 t = s.strip().lower()
2782 try:
2782 try:
2783 for k, u in _sizeunits:
2783 for k, u in _sizeunits:
2784 if t.endswith(k):
2784 if t.endswith(k):
2785 return int(float(t[:-len(k)]) * u)
2785 return int(float(t[:-len(k)]) * u)
2786 return int(t)
2786 return int(t)
2787 except ValueError:
2787 except ValueError:
2788 raise error.ParseError(_("couldn't parse size: %s") % s)
2788 raise error.ParseError(_("couldn't parse size: %s") % s)
2789
2789
2790 class hooks(object):
2790 class hooks(object):
2791 '''A collection of hook functions that can be used to extend a
2791 '''A collection of hook functions that can be used to extend a
2792 function's behavior. Hooks are called in lexicographic order,
2792 function's behavior. Hooks are called in lexicographic order,
2793 based on the names of their sources.'''
2793 based on the names of their sources.'''
2794
2794
2795 def __init__(self):
2795 def __init__(self):
2796 self._hooks = []
2796 self._hooks = []
2797
2797
2798 def add(self, source, hook):
2798 def add(self, source, hook):
2799 self._hooks.append((source, hook))
2799 self._hooks.append((source, hook))
2800
2800
2801 def __call__(self, *args):
2801 def __call__(self, *args):
2802 self._hooks.sort(key=lambda x: x[0])
2802 self._hooks.sort(key=lambda x: x[0])
2803 results = []
2803 results = []
2804 for source, hook in self._hooks:
2804 for source, hook in self._hooks:
2805 results.append(hook(*args))
2805 results.append(hook(*args))
2806 return results
2806 return results
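A small doctest-style sketch of the hooks class; the source names and callables are made up for illustration, and show the lexicographic call order:

    >>> h = hooks()
    >>> h.add('ext-b', lambda x: x + 1)
    >>> h.add('ext-a', lambda x: x * 2)
    >>> h(3)
    [6, 4]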
2807
2807
2808 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
2808 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s'):
2809 '''Yields lines for a nicely formatted stacktrace.
2809 '''Yields lines for a nicely formatted stacktrace.
2810 Skips the 'skip' last entries.
2810 Skips the 'skip' last entries.
2811 Each file+linenumber is formatted according to fileline.
2811 Each file+linenumber is formatted according to fileline.
2812 Each line is formatted according to line.
2812 Each line is formatted according to line.
2813 If line is None, it yields:
2813 If line is None, it yields:
2814 length of longest filepath+line number,
2814 length of longest filepath+line number,
2815 filepath+linenumber,
2815 filepath+linenumber,
2816 function
2816 function
2817
2817
2818 Not to be used in production code, but very convenient while developing.
2818 Not to be used in production code, but very convenient while developing.
2819 '''
2819 '''
2820 entries = [(fileline % (fn, ln), func)
2820 entries = [(fileline % (fn, ln), func)
2821 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2821 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2822 if entries:
2822 if entries:
2823 fnmax = max(len(entry[0]) for entry in entries)
2823 fnmax = max(len(entry[0]) for entry in entries)
2824 for fnln, func in entries:
2824 for fnln, func in entries:
2825 if line is None:
2825 if line is None:
2826 yield (fnmax, fnln, func)
2826 yield (fnmax, fnln, func)
2827 else:
2827 else:
2828 yield line % (fnmax, fnln, func)
2828 yield line % (fnmax, fnln, func)
2829
2829
2830 def debugstacktrace(msg='stacktrace', skip=0, f=stderr, otherf=stdout):
2830 def debugstacktrace(msg='stacktrace', skip=0, f=stderr, otherf=stdout):
2831 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2831 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2832 Skips the 'skip' last entries. By default it will flush stdout first.
2832 Skips the 'skip' last entries. By default it will flush stdout first.
2833 It can be used everywhere and intentionally does not require an ui object.
2833 It can be used everywhere and intentionally does not require an ui object.
2834 Not to be used in production code, but very convenient while developing.
2834 Not to be used in production code, but very convenient while developing.
2835 '''
2835 '''
2836 if otherf:
2836 if otherf:
2837 otherf.flush()
2837 otherf.flush()
2838 f.write('%s at:\n' % msg)
2838 f.write('%s at:\n' % msg)
2839 for line in getstackframes(skip + 1):
2839 for line in getstackframes(skip + 1):
2840 f.write(line)
2840 f.write(line)
2841 f.flush()
2841 f.flush()
2842
2842
2843 class dirs(object):
2843 class dirs(object):
2844 '''a multiset of directory names from a dirstate or manifest'''
2844 '''a multiset of directory names from a dirstate or manifest'''
2845
2845
2846 def __init__(self, map, skip=None):
2846 def __init__(self, map, skip=None):
2847 self._dirs = {}
2847 self._dirs = {}
2848 addpath = self.addpath
2848 addpath = self.addpath
2849 if safehasattr(map, 'iteritems') and skip is not None:
2849 if safehasattr(map, 'iteritems') and skip is not None:
2850 for f, s in map.iteritems():
2850 for f, s in map.iteritems():
2851 if s[0] != skip:
2851 if s[0] != skip:
2852 addpath(f)
2852 addpath(f)
2853 else:
2853 else:
2854 for f in map:
2854 for f in map:
2855 addpath(f)
2855 addpath(f)
2856
2856
2857 def addpath(self, path):
2857 def addpath(self, path):
2858 dirs = self._dirs
2858 dirs = self._dirs
2859 for base in finddirs(path):
2859 for base in finddirs(path):
2860 if base in dirs:
2860 if base in dirs:
2861 dirs[base] += 1
2861 dirs[base] += 1
2862 return
2862 return
2863 dirs[base] = 1
2863 dirs[base] = 1
2864
2864
2865 def delpath(self, path):
2865 def delpath(self, path):
2866 dirs = self._dirs
2866 dirs = self._dirs
2867 for base in finddirs(path):
2867 for base in finddirs(path):
2868 if dirs[base] > 1:
2868 if dirs[base] > 1:
2869 dirs[base] -= 1
2869 dirs[base] -= 1
2870 return
2870 return
2871 del dirs[base]
2871 del dirs[base]
2872
2872
2873 def __iter__(self):
2873 def __iter__(self):
2874 return self._dirs.iterkeys()
2874 return self._dirs.iterkeys()
2875
2875
2876 def __contains__(self, d):
2876 def __contains__(self, d):
2877 return d in self._dirs
2877 return d in self._dirs
2878
2878
2879 if safehasattr(parsers, 'dirs'):
2879 if safehasattr(parsers, 'dirs'):
2880 dirs = parsers.dirs
2880 dirs = parsers.dirs
2881
2881
2882 def finddirs(path):
2882 def finddirs(path):
2883 pos = path.rfind('/')
2883 pos = path.rfind('/')
2884 while pos != -1:
2884 while pos != -1:
2885 yield path[:pos]
2885 yield path[:pos]
2886 pos = path.rfind('/', 0, pos)
2886 pos = path.rfind('/', 0, pos)
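Doctest-style sketches of finddirs() and the pure-Python dirs multiset above (illustrative only; note that only ancestor directories are recorded, not the paths themselves):

    >>> list(finddirs('a/b/c'))
    ['a/b', 'a']
    >>> d = dirs(['a/b/c', 'a/d'])
    >>> 'a' in d, 'a/b' in d, 'a/b/c' in d
    (True, True, False)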
2887
2887
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                pending = sys.exc_info()
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed

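# A minimal usage sketch of ctxmanager (hypothetical resources, unused
# helper): enter() enters the supplied context managers in order, atexit()
# callbacks run first on the way out, and the managers' __exit__ handlers
# then run in reverse order of entry.
def _ctxmanagerexample():
    import contextlib

    log = []

    @contextlib.contextmanager
    def res(name):
        log.append('enter ' + name)
        try:
            yield name
        finally:
            log.append('exit ' + name)

    with ctxmanager(lambda: res('lock'), lambda: res('tr')) as c:
        lock, tr = c.enter()            # enters 'lock', then 'tr'
        c.atexit(log.append, 'atexit')  # runs before any __exit__
    assert log == ['enter lock', 'enter tr',
                   'atexit', 'exit tr', 'exit lock']
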
# compression code

class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}

    def __getitem__(self, key):
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # Only advertise an external-facing bundle name if one was
            # declared (engines may register an internal type only).
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle name isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

compengines = compressormanager()

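# A usage sketch of the manager (unused helper; relies on the engines
# registered further down in this file): the same engine is reachable both
# by its user-facing bundle spec name and by its internal bundle identifier.
def _compenginesexample():
    zl = compengines.forbundlename('gzip')        # bundle spec name
    assert zl.name() == 'zlib'
    assert zl is compengines.forbundletype('GZ')  # internal identifier
    assert 'gzip' in compengines.supportedbundlenames
    assert 'GZ' in compengines.supportedbundletypes
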
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.
    """
    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of chunks of bytes representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

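# A sketch of what a third-party engine could look like (hypothetical names,
# never registered here; assumes an lzma module is importable, e.g. the
# Python 3 standard library or the backports.lzma package). A real extension
# would construct such an engine in its setup code and pass it to
# compengines.register().
class _examplexzengine(compressionengine):
    def name(self):
        return 'xz'

    def available(self):
        try:
            import lzma
            return bool(lzma)
        except ImportError:
            return False

    def bundletype(self):
        # user-facing bundle spec name, internal bundle identifier
        return 'xz', 'XZ'

    def compressstream(self, it, opts=None):
        import lzma
        z = lzma.LZMACompressor()
        for chunk in it:
            data = z.compress(chunk)
            if data:  # compress() may buffer and emit nothing yet
                yield data
        yield z.flush()

    def decompressorreader(self, fh):
        import lzma
        def gen():
            d = lzma.LZMADecompressor()
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)
        return chunkbuffer(gen())
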
class _zlibengine(compressionengine):
    def name(self):
        return 'zlib'

    def bundletype(self):
        return 'gzip', 'GZ'

    def compressstream(self, it, opts=None):
        opts = opts or {}

        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through the generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

compengines.register(_zlibengine())

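# The bounded decompress() call above is the standard zlib idiom for capping
# memory use: pass a max_length, then feed d.unconsumed_tail back in until it
# is empty. A self-contained illustration of the same loop (unused helper,
# standard library only, hypothetical payload):
def _boundeddecompressexample():
    import zlib

    payload = b'x' * (1 << 20)                       # 1 MiB, very compressible
    compressed = zlib.compress(payload)

    d = zlib.decompressobj()
    pieces = []
    chunk = compressed
    while chunk:
        pieces.append(d.decompress(chunk, 2 ** 18))  # at most 256 KiB out
        chunk = d.unconsumed_tail                    # input not yet consumed
    pieces.append(d.flush())

    assert all(len(p) <= 2 ** 18 for p in pieces)
    assert b''.join(pieces) == payload
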
class _bz2engine(compressionengine):
    def name(self):
        return 'bz2'

    def bundletype(self):
        return 'bzip2', 'BZ'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        z = bz2.BZ2Compressor(opts.get('level', 9))
        for chunk in it:
            data = z.compress(chunk)
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())

class _truncatedbz2engine(compressionengine):
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            # The input stream doesn't have the 'BZ' header. So add it back.
            d = bz2.BZ2Decompressor()
            d.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_truncatedbz2engine())

class _noopengine(compressionengine):
    def name(self):
        return 'none'

    def bundletype(self):
        return 'none', 'UN'

    def compressstream(self, it, opts=None):
        return it

    def decompressorreader(self, fh):
        return fh

compengines.register(_noopengine())

class _zstdengine(compressionengine):
    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        return bool(self._module)

    def bundletype(self):
        return 'zstd', 'ZS'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

compengines.register(_zstdengine())

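# A round-trip sketch over the engines registered above (unused helper,
# hypothetical payload): compressstream() turns an iterator of byte chunks
# into compressed chunks, and decompressorreader() wraps any object with a
# read(size) method and hands the original bytes back.
def _compressionroundtripexample():
    import io

    payload = [b'chunk one, ', b'chunk two']
    for spec in ('gzip', 'bzip2', 'none'):
        engine = compengines.forbundlename(spec)
        compressed = b''.join(engine.compressstream(iter(payload)))
        reader = engine.decompressorreader(io.BytesIO(compressed))
        out = b''
        while True:
            data = reader.read(4096)
            if not data:
                break
            out += data
        assert out == b''.join(payload), spec
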
# convenient shortcut
dst = debugstacktrace