##// END OF EJS Templates
localrepo: remove all external users of localrepo.opener...
Angel Ezquerra
r23877:7cc77030 default
parent child Browse files
Show More

The requested changes are too big and content was truncated. Show full diff

@@ -1,158 +1,158 b''
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track config key.
12 The events that get logged can be configured via the blackbox.track config key.
13 Examples::
13 Examples::
14
14
15 [blackbox]
15 [blackbox]
16 track = *
16 track = *
17
17
18 [blackbox]
18 [blackbox]
19 track = command, commandfinish, commandexception, exthook, pythonhook
19 track = command, commandfinish, commandexception, exthook, pythonhook
20
20
21 [blackbox]
21 [blackbox]
22 track = incoming
22 track = incoming
23
23
24 [blackbox]
24 [blackbox]
25 # limit the size of a log file
25 # limit the size of a log file
26 maxsize = 1.5 MB
26 maxsize = 1.5 MB
27 # rotate up to N log files when the current one gets too big
27 # rotate up to N log files when the current one gets too big
28 maxfiles = 3
28 maxfiles = 3
29
29
30 """
30 """
31
31
32 from mercurial import util, cmdutil
32 from mercurial import util, cmdutil
33 from mercurial.i18n import _
33 from mercurial.i18n import _
34 import errno, os, re
34 import errno, os, re
35
35
36 cmdtable = {}
36 cmdtable = {}
37 command = cmdutil.command(cmdtable)
37 command = cmdutil.command(cmdtable)
38 testedwith = 'internal'
38 testedwith = 'internal'
39 lastblackbox = None
39 lastblackbox = None
40
40
41 def wrapui(ui):
41 def wrapui(ui):
42 class blackboxui(ui.__class__):
42 class blackboxui(ui.__class__):
43 @util.propertycache
43 @util.propertycache
44 def track(self):
44 def track(self):
45 return self.configlist('blackbox', 'track', ['*'])
45 return self.configlist('blackbox', 'track', ['*'])
46
46
47 def _openlogfile(self):
47 def _openlogfile(self):
48 def rotate(oldpath, newpath):
48 def rotate(oldpath, newpath):
49 try:
49 try:
50 os.unlink(newpath)
50 os.unlink(newpath)
51 except OSError, err:
51 except OSError, err:
52 if err.errno != errno.ENOENT:
52 if err.errno != errno.ENOENT:
53 self.debug("warning: cannot remove '%s': %s\n" %
53 self.debug("warning: cannot remove '%s': %s\n" %
54 (newpath, err.strerror))
54 (newpath, err.strerror))
55 try:
55 try:
56 if newpath:
56 if newpath:
57 os.rename(oldpath, newpath)
57 os.rename(oldpath, newpath)
58 except OSError, err:
58 except OSError, err:
59 if err.errno != errno.ENOENT:
59 if err.errno != errno.ENOENT:
60 self.debug("warning: cannot rename '%s' to '%s': %s\n" %
60 self.debug("warning: cannot rename '%s' to '%s': %s\n" %
61 (newpath, oldpath, err.strerror))
61 (newpath, oldpath, err.strerror))
62
62
63 fp = self._bbopener('blackbox.log', 'a')
63 fp = self._bbopener('blackbox.log', 'a')
64 maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
64 maxsize = self.configbytes('blackbox', 'maxsize', 1048576)
65 if maxsize > 0:
65 if maxsize > 0:
66 st = os.fstat(fp.fileno())
66 st = os.fstat(fp.fileno())
67 if st.st_size >= maxsize:
67 if st.st_size >= maxsize:
68 path = fp.name
68 path = fp.name
69 fp.close()
69 fp.close()
70 maxfiles = self.configint('blackbox', 'maxfiles', 7)
70 maxfiles = self.configint('blackbox', 'maxfiles', 7)
71 for i in xrange(maxfiles - 1, 1, -1):
71 for i in xrange(maxfiles - 1, 1, -1):
72 rotate(oldpath='%s.%d' % (path, i - 1),
72 rotate(oldpath='%s.%d' % (path, i - 1),
73 newpath='%s.%d' % (path, i))
73 newpath='%s.%d' % (path, i))
74 rotate(oldpath=path,
74 rotate(oldpath=path,
75 newpath=maxfiles > 0 and path + '.1')
75 newpath=maxfiles > 0 and path + '.1')
76 fp = self._bbopener('blackbox.log', 'a')
76 fp = self._bbopener('blackbox.log', 'a')
77 return fp
77 return fp
78
78
79 def log(self, event, *msg, **opts):
79 def log(self, event, *msg, **opts):
80 global lastblackbox
80 global lastblackbox
81 super(blackboxui, self).log(event, *msg, **opts)
81 super(blackboxui, self).log(event, *msg, **opts)
82
82
83 if not '*' in self.track and not event in self.track:
83 if not '*' in self.track and not event in self.track:
84 return
84 return
85
85
86 if util.safehasattr(self, '_blackbox'):
86 if util.safehasattr(self, '_blackbox'):
87 blackbox = self._blackbox
87 blackbox = self._blackbox
88 elif util.safehasattr(self, '_bbopener'):
88 elif util.safehasattr(self, '_bbopener'):
89 try:
89 try:
90 self._blackbox = self._openlogfile()
90 self._blackbox = self._openlogfile()
91 except (IOError, OSError), err:
91 except (IOError, OSError), err:
92 self.debug('warning: cannot write to blackbox.log: %s\n' %
92 self.debug('warning: cannot write to blackbox.log: %s\n' %
93 err.strerror)
93 err.strerror)
94 del self._bbopener
94 del self._bbopener
95 self._blackbox = None
95 self._blackbox = None
96 blackbox = self._blackbox
96 blackbox = self._blackbox
97 else:
97 else:
98 # certain ui instances exist outside the context of
98 # certain ui instances exist outside the context of
99 # a repo, so just default to the last blackbox that
99 # a repo, so just default to the last blackbox that
100 # was seen.
100 # was seen.
101 blackbox = lastblackbox
101 blackbox = lastblackbox
102
102
103 if blackbox:
103 if blackbox:
104 date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
104 date = util.datestr(None, '%Y/%m/%d %H:%M:%S')
105 user = util.getuser()
105 user = util.getuser()
106 formattedmsg = msg[0] % msg[1:]
106 formattedmsg = msg[0] % msg[1:]
107 try:
107 try:
108 blackbox.write('%s %s> %s' % (date, user, formattedmsg))
108 blackbox.write('%s %s> %s' % (date, user, formattedmsg))
109 except IOError, err:
109 except IOError, err:
110 self.debug('warning: cannot write to blackbox.log: %s\n' %
110 self.debug('warning: cannot write to blackbox.log: %s\n' %
111 err.strerror)
111 err.strerror)
112 lastblackbox = blackbox
112 lastblackbox = blackbox
113
113
114 def setrepo(self, repo):
114 def setrepo(self, repo):
115 self._bbopener = repo.opener
115 self._bbopener = repo.vfs
116
116
117 ui.__class__ = blackboxui
117 ui.__class__ = blackboxui
118
118
def uisetup(ui):
    """Extension entry point: install the blackbox ui wrapper."""
    wrapui(ui)
121
121
def reposetup(ui, repo):
    """Attach the blackbox log opener to local repositories.

    During 'hg pull' a httppeer repo is created to represent the remote
    repo; it has no .hg directory to put a blackbox in, so remote repos
    are skipped entirely.
    """
    if not repo.local():
        return

    if util.safehasattr(ui, 'setrepo'):
        ui.setrepo(repo)
131
131
@command('^blackbox',
    [('l', 'limit', 10, _('the number of events to show')),
    ],
    _('hg blackbox [OPTION]...'))
def blackbox(ui, repo, *revs, **opts):
    '''view the recent repository events
    '''

    if not os.path.exists(repo.join('blackbox.log')):
        return

    limit = opts.get('limit')
    blackbox = repo.vfs('blackbox.log', 'r')
    lines = blackbox.read().split('\n')

    # Event lines look like: 2013/01/23 19:13:36 root> ...
    # Fix: use a raw string so the '\d' escapes are not interpreted by
    # Python, and compile once instead of re-matching the pattern text
    # for every line of the log.
    eventstart = re.compile(r'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*')

    count = 0
    output = []
    for line in reversed(lines):
        if count >= limit:
            break

        # count the commands by matching the timestamped event lines
        if eventstart.match(line):
            count += 1
        output.append(line)

    ui.status('\n'.join(reversed(output)))
@@ -1,350 +1,350 b''
1 """automatically manage newlines in repository files
1 """automatically manage newlines in repository files
2
2
3 This extension allows you to manage the type of line endings (CRLF or
3 This extension allows you to manage the type of line endings (CRLF or
4 LF) that are used in the repository and in the local working
4 LF) that are used in the repository and in the local working
5 directory. That way you can get CRLF line endings on Windows and LF on
5 directory. That way you can get CRLF line endings on Windows and LF on
6 Unix/Mac, thereby letting everybody use their OS native line endings.
6 Unix/Mac, thereby letting everybody use their OS native line endings.
7
7
8 The extension reads its configuration from a versioned ``.hgeol``
8 The extension reads its configuration from a versioned ``.hgeol``
9 configuration file found in the root of the working copy. The
9 configuration file found in the root of the working copy. The
10 ``.hgeol`` file use the same syntax as all other Mercurial
10 ``.hgeol`` file use the same syntax as all other Mercurial
11 configuration files. It uses two sections, ``[patterns]`` and
11 configuration files. It uses two sections, ``[patterns]`` and
12 ``[repository]``.
12 ``[repository]``.
13
13
14 The ``[patterns]`` section specifies how line endings should be
14 The ``[patterns]`` section specifies how line endings should be
15 converted between the working copy and the repository. The format is
15 converted between the working copy and the repository. The format is
16 specified by a file pattern. The first match is used, so put more
16 specified by a file pattern. The first match is used, so put more
17 specific patterns first. The available line endings are ``LF``,
17 specific patterns first. The available line endings are ``LF``,
18 ``CRLF``, and ``BIN``.
18 ``CRLF``, and ``BIN``.
19
19
20 Files with the declared format of ``CRLF`` or ``LF`` are always
20 Files with the declared format of ``CRLF`` or ``LF`` are always
21 checked out and stored in the repository in that format and files
21 checked out and stored in the repository in that format and files
22 declared to be binary (``BIN``) are left unchanged. Additionally,
22 declared to be binary (``BIN``) are left unchanged. Additionally,
23 ``native`` is an alias for checking out in the platform's default line
23 ``native`` is an alias for checking out in the platform's default line
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
24 ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
25 Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
26 default behaviour; it is only needed if you need to override a later,
26 default behaviour; it is only needed if you need to override a later,
27 more general pattern.
27 more general pattern.
28
28
29 The optional ``[repository]`` section specifies the line endings to
29 The optional ``[repository]`` section specifies the line endings to
30 use for files stored in the repository. It has a single setting,
30 use for files stored in the repository. It has a single setting,
31 ``native``, which determines the storage line endings for files
31 ``native``, which determines the storage line endings for files
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
32 declared as ``native`` in the ``[patterns]`` section. It can be set to
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
33 ``LF`` or ``CRLF``. The default is ``LF``. For example, this means
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
34 that on Windows, files configured as ``native`` (``CRLF`` by default)
35 will be converted to ``LF`` when stored in the repository. Files
35 will be converted to ``LF`` when stored in the repository. Files
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
36 declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
37 are always stored as-is in the repository.
37 are always stored as-is in the repository.
38
38
39 Example versioned ``.hgeol`` file::
39 Example versioned ``.hgeol`` file::
40
40
41 [patterns]
41 [patterns]
42 **.py = native
42 **.py = native
43 **.vcproj = CRLF
43 **.vcproj = CRLF
44 **.txt = native
44 **.txt = native
45 Makefile = LF
45 Makefile = LF
46 **.jpg = BIN
46 **.jpg = BIN
47
47
48 [repository]
48 [repository]
49 native = LF
49 native = LF
50
50
51 .. note::
51 .. note::
52
52
53 The rules will first apply when files are touched in the working
53 The rules will first apply when files are touched in the working
54 copy, e.g. by updating to null and back to tip to touch all files.
54 copy, e.g. by updating to null and back to tip to touch all files.
55
55
56 The extension uses an optional ``[eol]`` section read from both the
56 The extension uses an optional ``[eol]`` section read from both the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
57 normal Mercurial configuration files and the ``.hgeol`` file, with the
58 latter overriding the former. You can use that section to control the
58 latter overriding the former. You can use that section to control the
59 overall behavior. There are three settings:
59 overall behavior. There are three settings:
60
60
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
61 - ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
62 ``CRLF`` to override the default interpretation of ``native`` for
62 ``CRLF`` to override the default interpretation of ``native`` for
63 checkout. This can be used with :hg:`archive` on Unix, say, to
63 checkout. This can be used with :hg:`archive` on Unix, say, to
64 generate an archive where files have line endings for Windows.
64 generate an archive where files have line endings for Windows.
65
65
66 - ``eol.only-consistent`` (default True) can be set to False to make
66 - ``eol.only-consistent`` (default True) can be set to False to make
67 the extension convert files with inconsistent EOLs. Inconsistent
67 the extension convert files with inconsistent EOLs. Inconsistent
68 means that there is both ``CRLF`` and ``LF`` present in the file.
68 means that there is both ``CRLF`` and ``LF`` present in the file.
69 Such files are normally not touched under the assumption that they
69 Such files are normally not touched under the assumption that they
70 have mixed EOLs on purpose.
70 have mixed EOLs on purpose.
71
71
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
72 - ``eol.fix-trailing-newline`` (default False) can be set to True to
73 ensure that converted files end with a EOL character (either ``\\n``
73 ensure that converted files end with a EOL character (either ``\\n``
74 or ``\\r\\n`` as per the configured patterns).
74 or ``\\r\\n`` as per the configured patterns).
75
75
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
76 The extension provides ``cleverencode:`` and ``cleverdecode:`` filters
77 like the deprecated win32text extension does. This means that you can
77 like the deprecated win32text extension does. This means that you can
78 disable win32text and enable eol and your filters will still work. You
78 disable win32text and enable eol and your filters will still work. You
79 only need to these filters until you have prepared a ``.hgeol`` file.
79 only need to these filters until you have prepared a ``.hgeol`` file.
80
80
81 The ``win32text.forbid*`` hooks provided by the win32text extension
81 The ``win32text.forbid*`` hooks provided by the win32text extension
82 have been unified into a single hook named ``eol.checkheadshook``. The
82 have been unified into a single hook named ``eol.checkheadshook``. The
83 hook will lookup the expected line endings from the ``.hgeol`` file,
83 hook will lookup the expected line endings from the ``.hgeol`` file,
84 which means you must migrate to a ``.hgeol`` file first before using
84 which means you must migrate to a ``.hgeol`` file first before using
85 the hook. ``eol.checkheadshook`` only checks heads, intermediate
85 the hook. ``eol.checkheadshook`` only checks heads, intermediate
86 invalid revisions will be pushed. To forbid them completely, use the
86 invalid revisions will be pushed. To forbid them completely, use the
87 ``eol.checkallhook`` hook. These hooks are best used as
87 ``eol.checkallhook`` hook. These hooks are best used as
88 ``pretxnchangegroup`` hooks.
88 ``pretxnchangegroup`` hooks.
89
89
90 See :hg:`help patterns` for more information about the glob patterns
90 See :hg:`help patterns` for more information about the glob patterns
91 used.
91 used.
92 """
92 """
93
93
94 from mercurial.i18n import _
94 from mercurial.i18n import _
95 from mercurial import util, config, extensions, match, error
95 from mercurial import util, config, extensions, match, error
96 import re, os
96 import re, os
97
97
98 testedwith = 'internal'
98 testedwith = 'internal'
99
99
100 # Matches a lone LF, i.e., one that is not part of CRLF.
100 # Matches a lone LF, i.e., one that is not part of CRLF.
101 singlelf = re.compile('(^|[^\r])\n')
101 singlelf = re.compile('(^|[^\r])\n')
102 # Matches a single EOL which can either be a CRLF where repeated CR
102 # Matches a single EOL which can either be a CRLF where repeated CR
103 # are removed or a LF. We do not care about old Macintosh files, so a
103 # are removed or a LF. We do not care about old Macintosh files, so a
104 # stray CR is an error.
104 # stray CR is an error.
105 eolre = re.compile('\r*\n')
105 eolre = re.compile('\r*\n')
106
106
107
107
def inconsistenteol(data):
    """Report whether *data* mixes CRLF with lone LF line endings."""
    if '\r\n' not in data:
        return False
    return singlelf.search(data)
110
110
def tolf(s, params, ui, **kwargs):
    """Filter to convert to LF EOLs."""
    # Binary content is never converted.
    if util.binary(s):
        return s
    # By default files that already mix CRLF and LF are left alone.
    onlyconsistent = ui.configbool('eol', 'only-consistent', True)
    if onlyconsistent and inconsistenteol(s):
        return s
    fixtrailing = ui.configbool('eol', 'fix-trailing-newline', False)
    if fixtrailing and s and s[-1] != '\n':
        s = s + '\n'
    return eolre.sub('\n', s)
121
121
def tocrlf(s, params, ui, **kwargs):
    """Filter to convert to CRLF EOLs."""
    # Binary content is never converted.
    if util.binary(s):
        return s
    # By default files that already mix CRLF and LF are left alone.
    onlyconsistent = ui.configbool('eol', 'only-consistent', True)
    if onlyconsistent and inconsistenteol(s):
        return s
    fixtrailing = ui.configbool('eol', 'fix-trailing-newline', False)
    if fixtrailing and s and s[-1] != '\n':
        s = s + '\n'
    return eolre.sub('\r\n', s)
132
132
def isbinary(s, params):
    """Identity filter: leave the file content untouched."""
    return s
136
136
137 filters = {
137 filters = {
138 'to-lf': tolf,
138 'to-lf': tolf,
139 'to-crlf': tocrlf,
139 'to-crlf': tocrlf,
140 'is-binary': isbinary,
140 'is-binary': isbinary,
141 # The following provide backwards compatibility with win32text
141 # The following provide backwards compatibility with win32text
142 'cleverencode:': tolf,
142 'cleverencode:': tolf,
143 'cleverdecode:': tocrlf
143 'cleverdecode:': tocrlf
144 }
144 }
145
145
class eolfile(object):
    """Parsed representation of a versioned ``.hgeol`` file."""

    def __init__(self, ui, root, data):
        # Declared style -> filter name, for commit (encode) and
        # checkout (decode) directions respectively.
        self._decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
        self._encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}

        self.cfg = config.config()
        # Our own control files must never be converted; inserting this
        # pattern first lets it override a '** = native' pattern.
        self.cfg.set('patterns', '.hg*', 'BIN', 'eol')
        # Now layer the user's patterns on top.
        self.cfg.parse('.hgeol', data)

        # Resolve what 'native' means on each side.
        repolf = self.cfg.get('repository', 'native') != 'CRLF'
        self._encode['NATIVE'] = 'to-lf' if repolf else 'to-crlf'
        wdlf = ui.config('eol', 'native', os.linesep) in ('LF', '\n')
        self._decode['NATIVE'] = 'to-lf' if wdlf else 'to-crlf'

        # Partition the patterns: BIN files are exempt from the
        # inconsistent-newline matcher, everything else takes part.
        patterns = self.cfg.items('patterns')
        include = [pat for pat, style in patterns if style.upper() != 'BIN']
        exclude = [pat for pat, style in patterns if style.upper() == 'BIN']
        self.match = match.match(root, '', [], include, exclude)

    def copytoui(self, ui):
        """Install the parsed patterns as encode/decode filters on *ui*."""
        for pattern, style in self.cfg.items('patterns'):
            key = style.upper()
            try:
                ui.setconfig('decode', pattern, self._decode[key], 'eol')
                ui.setconfig('encode', pattern, self._encode[key], 'eol')
            except KeyError:
                ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
                        % (style, self.cfg.source('patterns', pattern)))
        # eol.only-consistent can be specified in ~/.hgrc or .hgeol
        for k, v in self.cfg.items('eol'):
            ui.setconfig('eol', k, v, 'eol')

    def checkrev(self, repo, ctx, files):
        """Return (node, filter, file) triples for files in *ctx* whose
        stored line endings contradict their declared style."""
        failed = []
        for f in (files or ctx.files()):
            if f not in ctx:
                continue
            for pattern, style in self.cfg.items('patterns'):
                if not match.match(repo.root, '', [pattern])(f):
                    continue
                # First matching pattern decides, whatever the outcome.
                target = self._encode[style.upper()]
                badlf = target == "to-lf" and "\r\n" in data
                break
        return failed

    def checkrev(self, repo, ctx, files):
        """Return (node, filter, file) triples for files in *ctx* whose
        stored line endings contradict their declared style."""
        failed = []
        for f in (files or ctx.files()):
            if f not in ctx:
                continue
            for pattern, style in self.cfg.items('patterns'):
                if not match.match(repo.root, '', [pattern])(f):
                    continue
                # First matching pattern decides, whatever the outcome.
                target = self._encode[style.upper()]
                data = ctx[f].data()
                badlf = target == "to-lf" and "\r\n" in data
                badcrlf = target == "to-crlf" and singlelf.search(data)
                if badlf or badcrlf:
                    failed.append((str(ctx), target, f))
                break
        return failed
203
203
204 def parseeol(ui, repo, nodes):
204 def parseeol(ui, repo, nodes):
205 try:
205 try:
206 for node in nodes:
206 for node in nodes:
207 try:
207 try:
208 if node is None:
208 if node is None:
209 # Cannot use workingctx.data() since it would load
209 # Cannot use workingctx.data() since it would load
210 # and cache the filters before we configure them.
210 # and cache the filters before we configure them.
211 data = repo.wfile('.hgeol').read()
211 data = repo.wfile('.hgeol').read()
212 else:
212 else:
213 data = repo[node]['.hgeol'].data()
213 data = repo[node]['.hgeol'].data()
214 return eolfile(ui, repo.root, data)
214 return eolfile(ui, repo.root, data)
215 except (IOError, LookupError):
215 except (IOError, LookupError):
216 pass
216 pass
217 except error.ParseError, inst:
217 except error.ParseError, inst:
218 ui.warn(_("warning: ignoring .hgeol file due to parse error "
218 ui.warn(_("warning: ignoring .hgeol file due to parse error "
219 "at %s: %s\n") % (inst.args[1], inst.args[0]))
219 "at %s: %s\n") % (inst.args[1], inst.args[0]))
220 return None
220 return None
221
221
def _checkhook(ui, repo, node, headsonly):
    """Shared implementation of the EOL verification hooks.

    Walks the revisions from *node* to tip (keeping only the heads when
    *headsonly* is set) and aborts if any touched file violates its
    declared line-ending style.
    """
    # Get revisions to check and touched files at the same time
    files = set()
    revs = set()
    for rev in xrange(repo[node].rev(), len(repo)):
        revs.add(rev)
        if headsonly:
            ctx = repo[rev]
            files.update(ctx.files())
            # Drop non-head ancestors as their children come along.
            for pctx in ctx.parents():
                revs.discard(pctx.rev())

    failed = []
    for rev in revs:
        ctx = repo[rev]
        eol = parseeol(ui, repo, [ctx.node()])
        if eol:
            failed.extend(eol.checkrev(repo, ctx, files))

    if failed:
        eols = {'to-lf': 'CRLF', 'to-crlf': 'LF'}
        msgs = [_(" %s in %s should not have %s line endings") %
                (f, n, eols[target]) for n, target, f in failed]
        raise util.Abort(_("end-of-line check failed:\n") + "\n".join(msgs))
247
247
def checkallhook(ui, repo, node, hooktype, **kwargs):
    """verify that files have expected EOLs"""
    # Check every incoming revision, not just the heads.
    _checkhook(ui, repo, node, False)
251
251
def checkheadshook(ui, repo, node, hooktype, **kwargs):
    """verify that files have expected EOLs"""
    # Only the new heads are checked; intermediate revisions can still
    # be pushed (use checkallhook to forbid those too).
    _checkhook(ui, repo, node, True)

# "checkheadshook" used to be called "hook"
hook = checkheadshook
258
258
def preupdate(ui, repo, hooktype, parent1, parent2):
    """preupdate hook: reload the EOL filters from the update target."""
    repo.loadeol([parent1])
    return False
262
262
def uisetup(ui):
    """Register the preupdate hook that refreshes .hgeol handling."""
    ui.setconfig('hooks', 'preupdate.eol', preupdate, 'eol')
265
265
def extsetup(ui):
    """Warn when the incompatible win32text extension is also loaded."""
    try:
        extensions.find('win32text')
    except KeyError:
        # win32text is not enabled; nothing to warn about.
        return
    ui.warn(_("the eol extension is incompatible with the "
              "win32text extension\n"))
273
273
274
274
def reposetup(ui, repo):
    """Set up the eol extension on *repo*: install the conversion
    filters, wrap the repo class, and prime the dirstate."""
    uisetup(repo.ui)

    if not repo.local():
        return
    for name, fn in filters.iteritems():
        repo.adddatafilter(name, fn)

    ui.setconfig('patch', 'eol', 'auto', 'eol')

    class eolrepo(repo.__class__):

        def loadeol(self, nodes):
            """Parse .hgeol from *nodes* and install its filters;
            return the matcher for EOL-managed files, or None."""
            eol = parseeol(self.ui, self, nodes)
            if eol is None:
                return None
            eol.copytoui(self.ui)
            return eol.match

        def _hgcleardirstate(self):
            self._eolfile = self.loadeol([None, 'tip'])
            if not self._eolfile:
                self._eolfile = util.never
                return

            try:
                cachemtime = os.path.getmtime(self.join("eol.cache"))
            except OSError:
                cachemtime = 0

            try:
                eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
            except OSError:
                eolmtime = 0

            if eolmtime > cachemtime:
                self.ui.debug("eol: detected change in .hgeol\n")
                try:
                    wlock = self.wlock()
                    try:
                        for f in self.dirstate:
                            if self.dirstate[f] == 'n':
                                # all normal files need to be looked at
                                # again since the new .hgeol file might no
                                # longer match a file it matched before
                                self.dirstate.normallookup(f)
                        # Create or touch the cache to update mtime
                        self.vfs("eol.cache", "w").close()
                    finally:
                        # Fix: release unconditionally; the lock used to
                        # leak when anything above raised, since release
                        # was only reached on the success path.
                        wlock.release()
                except error.LockUnavailable:
                    # If we cannot lock the repository and clear the
                    # dirstate, then a commit might not see all files
                    # as modified. But if we cannot lock the
                    # repository, then we can also not make a commit,
                    # so ignore the error.
                    pass

        def commitctx(self, ctx, error=False):
            for f in sorted(ctx.added() + ctx.modified()):
                if not self._eolfile(f):
                    continue
                fctx = ctx[f]
                if fctx is None:
                    continue
                data = fctx.data()
                if util.binary(data):
                    # We should not abort here, since the user should
                    # be able to say "** = native" to automatically
                    # have all non-binary files taken care of.
                    continue
                if inconsistenteol(data):
                    raise util.Abort(_("inconsistent newline style "
                                       "in %s\n") % f)
            return super(eolrepo, self).commitctx(ctx, error)
    repo.__class__ = eolrepo
    repo._hgcleardirstate()
@@ -1,297 +1,297 b''
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
1 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 '''commands to sign and verify changesets'''
6 '''commands to sign and verify changesets'''
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match, cmdutil
9 from mercurial import util, commands, match, cmdutil
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 cmdtable = {}
13 cmdtable = {}
14 command = cmdutil.command(cmdtable)
14 command = cmdutil.command(cmdtable)
15 testedwith = 'internal'
15 testedwith = 'internal'
16
16
17 class gpg(object):
17 class gpg(object):
18 def __init__(self, path, key=None):
18 def __init__(self, path, key=None):
19 self.path = path
19 self.path = path
20 self.key = (key and " --local-user \"%s\"" % key) or ""
20 self.key = (key and " --local-user \"%s\"" % key) or ""
21
21
22 def sign(self, data):
22 def sign(self, data):
23 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
23 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
24 return util.filter(data, gpgcmd)
24 return util.filter(data, gpgcmd)
25
25
26 def verify(self, data, sig):
26 def verify(self, data, sig):
27 """ returns of the good and bad signatures"""
27 """ returns of the good and bad signatures"""
28 sigfile = datafile = None
28 sigfile = datafile = None
29 try:
29 try:
30 # create temporary files
30 # create temporary files
31 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
31 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(sig)
33 fp.write(sig)
34 fp.close()
34 fp.close()
35 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
35 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
36 fp = os.fdopen(fd, 'wb')
36 fp = os.fdopen(fd, 'wb')
37 fp.write(data)
37 fp.write(data)
38 fp.close()
38 fp.close()
39 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
39 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
40 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
40 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
41 ret = util.filter("", gpgcmd)
41 ret = util.filter("", gpgcmd)
42 finally:
42 finally:
43 for f in (sigfile, datafile):
43 for f in (sigfile, datafile):
44 try:
44 try:
45 if f:
45 if f:
46 os.unlink(f)
46 os.unlink(f)
47 except OSError:
47 except OSError:
48 pass
48 pass
49 keys = []
49 keys = []
50 key, fingerprint = None, None
50 key, fingerprint = None, None
51 for l in ret.splitlines():
51 for l in ret.splitlines():
52 # see DETAILS in the gnupg documentation
52 # see DETAILS in the gnupg documentation
53 # filter the logger output
53 # filter the logger output
54 if not l.startswith("[GNUPG:]"):
54 if not l.startswith("[GNUPG:]"):
55 continue
55 continue
56 l = l[9:]
56 l = l[9:]
57 if l.startswith("VALIDSIG"):
57 if l.startswith("VALIDSIG"):
58 # fingerprint of the primary key
58 # fingerprint of the primary key
59 fingerprint = l.split()[10]
59 fingerprint = l.split()[10]
60 elif l.startswith("ERRSIG"):
60 elif l.startswith("ERRSIG"):
61 key = l.split(" ", 3)[:2]
61 key = l.split(" ", 3)[:2]
62 key.append("")
62 key.append("")
63 fingerprint = None
63 fingerprint = None
64 elif (l.startswith("GOODSIG") or
64 elif (l.startswith("GOODSIG") or
65 l.startswith("EXPSIG") or
65 l.startswith("EXPSIG") or
66 l.startswith("EXPKEYSIG") or
66 l.startswith("EXPKEYSIG") or
67 l.startswith("BADSIG")):
67 l.startswith("BADSIG")):
68 if key is not None:
68 if key is not None:
69 keys.append(key + [fingerprint])
69 keys.append(key + [fingerprint])
70 key = l.split(" ", 2)
70 key = l.split(" ", 2)
71 fingerprint = None
71 fingerprint = None
72 if key is not None:
72 if key is not None:
73 keys.append(key + [fingerprint])
73 keys.append(key + [fingerprint])
74 return keys
74 return keys
75
75
76 def newgpg(ui, **opts):
76 def newgpg(ui, **opts):
77 """create a new gpg instance"""
77 """create a new gpg instance"""
78 gpgpath = ui.config("gpg", "cmd", "gpg")
78 gpgpath = ui.config("gpg", "cmd", "gpg")
79 gpgkey = opts.get('key')
79 gpgkey = opts.get('key')
80 if not gpgkey:
80 if not gpgkey:
81 gpgkey = ui.config("gpg", "key", None)
81 gpgkey = ui.config("gpg", "key", None)
82 return gpg(gpgpath, gpgkey)
82 return gpg(gpgpath, gpgkey)
83
83
84 def sigwalk(repo):
84 def sigwalk(repo):
85 """
85 """
86 walk over every sigs, yields a couple
86 walk over every sigs, yields a couple
87 ((node, version, sig), (filename, linenumber))
87 ((node, version, sig), (filename, linenumber))
88 """
88 """
89 def parsefile(fileiter, context):
89 def parsefile(fileiter, context):
90 ln = 1
90 ln = 1
91 for l in fileiter:
91 for l in fileiter:
92 if not l:
92 if not l:
93 continue
93 continue
94 yield (l.split(" ", 2), (context, ln))
94 yield (l.split(" ", 2), (context, ln))
95 ln += 1
95 ln += 1
96
96
97 # read the heads
97 # read the heads
98 fl = repo.file(".hgsigs")
98 fl = repo.file(".hgsigs")
99 for r in reversed(fl.heads()):
99 for r in reversed(fl.heads()):
100 fn = ".hgsigs|%s" % hgnode.short(r)
100 fn = ".hgsigs|%s" % hgnode.short(r)
101 for item in parsefile(fl.read(r).splitlines(), fn):
101 for item in parsefile(fl.read(r).splitlines(), fn):
102 yield item
102 yield item
103 try:
103 try:
104 # read local signatures
104 # read local signatures
105 fn = "localsigs"
105 fn = "localsigs"
106 for item in parsefile(repo.opener(fn), fn):
106 for item in parsefile(repo.vfs(fn), fn):
107 yield item
107 yield item
108 except IOError:
108 except IOError:
109 pass
109 pass
110
110
111 def getkeys(ui, repo, mygpg, sigdata, context):
111 def getkeys(ui, repo, mygpg, sigdata, context):
112 """get the keys who signed a data"""
112 """get the keys who signed a data"""
113 fn, ln = context
113 fn, ln = context
114 node, version, sig = sigdata
114 node, version, sig = sigdata
115 prefix = "%s:%d" % (fn, ln)
115 prefix = "%s:%d" % (fn, ln)
116 node = hgnode.bin(node)
116 node = hgnode.bin(node)
117
117
118 data = node2txt(repo, node, version)
118 data = node2txt(repo, node, version)
119 sig = binascii.a2b_base64(sig)
119 sig = binascii.a2b_base64(sig)
120 keys = mygpg.verify(data, sig)
120 keys = mygpg.verify(data, sig)
121
121
122 validkeys = []
122 validkeys = []
123 # warn for expired key and/or sigs
123 # warn for expired key and/or sigs
124 for key in keys:
124 for key in keys:
125 if key[0] == "ERRSIG":
125 if key[0] == "ERRSIG":
126 ui.write(_("%s Unknown key ID \"%s\"\n")
126 ui.write(_("%s Unknown key ID \"%s\"\n")
127 % (prefix, shortkey(ui, key[1][:15])))
127 % (prefix, shortkey(ui, key[1][:15])))
128 continue
128 continue
129 if key[0] == "BADSIG":
129 if key[0] == "BADSIG":
130 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
130 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
131 continue
131 continue
132 if key[0] == "EXPSIG":
132 if key[0] == "EXPSIG":
133 ui.write(_("%s Note: Signature has expired"
133 ui.write(_("%s Note: Signature has expired"
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
134 " (signed by: \"%s\")\n") % (prefix, key[2]))
135 elif key[0] == "EXPKEYSIG":
135 elif key[0] == "EXPKEYSIG":
136 ui.write(_("%s Note: This key has expired"
136 ui.write(_("%s Note: This key has expired"
137 " (signed by: \"%s\")\n") % (prefix, key[2]))
137 " (signed by: \"%s\")\n") % (prefix, key[2]))
138 validkeys.append((key[1], key[2], key[3]))
138 validkeys.append((key[1], key[2], key[3]))
139 return validkeys
139 return validkeys
140
140
141 @command("sigs", [], _('hg sigs'))
141 @command("sigs", [], _('hg sigs'))
142 def sigs(ui, repo):
142 def sigs(ui, repo):
143 """list signed changesets"""
143 """list signed changesets"""
144 mygpg = newgpg(ui)
144 mygpg = newgpg(ui)
145 revs = {}
145 revs = {}
146
146
147 for data, context in sigwalk(repo):
147 for data, context in sigwalk(repo):
148 node, version, sig = data
148 node, version, sig = data
149 fn, ln = context
149 fn, ln = context
150 try:
150 try:
151 n = repo.lookup(node)
151 n = repo.lookup(node)
152 except KeyError:
152 except KeyError:
153 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
153 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
154 continue
154 continue
155 r = repo.changelog.rev(n)
155 r = repo.changelog.rev(n)
156 keys = getkeys(ui, repo, mygpg, data, context)
156 keys = getkeys(ui, repo, mygpg, data, context)
157 if not keys:
157 if not keys:
158 continue
158 continue
159 revs.setdefault(r, [])
159 revs.setdefault(r, [])
160 revs[r].extend(keys)
160 revs[r].extend(keys)
161 for rev in sorted(revs, reverse=True):
161 for rev in sorted(revs, reverse=True):
162 for k in revs[rev]:
162 for k in revs[rev]:
163 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
163 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
164 ui.write("%-30s %s\n" % (keystr(ui, k), r))
164 ui.write("%-30s %s\n" % (keystr(ui, k), r))
165
165
166 @command("sigcheck", [], _('hg sigcheck REV'))
166 @command("sigcheck", [], _('hg sigcheck REV'))
167 def check(ui, repo, rev):
167 def check(ui, repo, rev):
168 """verify all the signatures there may be for a particular revision"""
168 """verify all the signatures there may be for a particular revision"""
169 mygpg = newgpg(ui)
169 mygpg = newgpg(ui)
170 rev = repo.lookup(rev)
170 rev = repo.lookup(rev)
171 hexrev = hgnode.hex(rev)
171 hexrev = hgnode.hex(rev)
172 keys = []
172 keys = []
173
173
174 for data, context in sigwalk(repo):
174 for data, context in sigwalk(repo):
175 node, version, sig = data
175 node, version, sig = data
176 if node == hexrev:
176 if node == hexrev:
177 k = getkeys(ui, repo, mygpg, data, context)
177 k = getkeys(ui, repo, mygpg, data, context)
178 if k:
178 if k:
179 keys.extend(k)
179 keys.extend(k)
180
180
181 if not keys:
181 if not keys:
182 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
182 ui.write(_("no valid signature for %s\n") % hgnode.short(rev))
183 return
183 return
184
184
185 # print summary
185 # print summary
186 ui.write("%s is signed by:\n" % hgnode.short(rev))
186 ui.write("%s is signed by:\n" % hgnode.short(rev))
187 for key in keys:
187 for key in keys:
188 ui.write(" %s\n" % keystr(ui, key))
188 ui.write(" %s\n" % keystr(ui, key))
189
189
190 def keystr(ui, key):
190 def keystr(ui, key):
191 """associate a string to a key (username, comment)"""
191 """associate a string to a key (username, comment)"""
192 keyid, user, fingerprint = key
192 keyid, user, fingerprint = key
193 comment = ui.config("gpg", fingerprint, None)
193 comment = ui.config("gpg", fingerprint, None)
194 if comment:
194 if comment:
195 return "%s (%s)" % (user, comment)
195 return "%s (%s)" % (user, comment)
196 else:
196 else:
197 return user
197 return user
198
198
199 @command("sign",
199 @command("sign",
200 [('l', 'local', None, _('make the signature local')),
200 [('l', 'local', None, _('make the signature local')),
201 ('f', 'force', None, _('sign even if the sigfile is modified')),
201 ('f', 'force', None, _('sign even if the sigfile is modified')),
202 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
202 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
203 ('k', 'key', '',
203 ('k', 'key', '',
204 _('the key id to sign with'), _('ID')),
204 _('the key id to sign with'), _('ID')),
205 ('m', 'message', '',
205 ('m', 'message', '',
206 _('use text as commit message'), _('TEXT')),
206 _('use text as commit message'), _('TEXT')),
207 ('e', 'edit', False, _('invoke editor on commit messages')),
207 ('e', 'edit', False, _('invoke editor on commit messages')),
208 ] + commands.commitopts2,
208 ] + commands.commitopts2,
209 _('hg sign [OPTION]... [REV]...'))
209 _('hg sign [OPTION]... [REV]...'))
210 def sign(ui, repo, *revs, **opts):
210 def sign(ui, repo, *revs, **opts):
211 """add a signature for the current or given revision
211 """add a signature for the current or given revision
212
212
213 If no revision is given, the parent of the working directory is used,
213 If no revision is given, the parent of the working directory is used,
214 or tip if no revision is checked out.
214 or tip if no revision is checked out.
215
215
216 See :hg:`help dates` for a list of formats valid for -d/--date.
216 See :hg:`help dates` for a list of formats valid for -d/--date.
217 """
217 """
218
218
219 mygpg = newgpg(ui, **opts)
219 mygpg = newgpg(ui, **opts)
220 sigver = "0"
220 sigver = "0"
221 sigmessage = ""
221 sigmessage = ""
222
222
223 date = opts.get('date')
223 date = opts.get('date')
224 if date:
224 if date:
225 opts['date'] = util.parsedate(date)
225 opts['date'] = util.parsedate(date)
226
226
227 if revs:
227 if revs:
228 nodes = [repo.lookup(n) for n in revs]
228 nodes = [repo.lookup(n) for n in revs]
229 else:
229 else:
230 nodes = [node for node in repo.dirstate.parents()
230 nodes = [node for node in repo.dirstate.parents()
231 if node != hgnode.nullid]
231 if node != hgnode.nullid]
232 if len(nodes) > 1:
232 if len(nodes) > 1:
233 raise util.Abort(_('uncommitted merge - please provide a '
233 raise util.Abort(_('uncommitted merge - please provide a '
234 'specific revision'))
234 'specific revision'))
235 if not nodes:
235 if not nodes:
236 nodes = [repo.changelog.tip()]
236 nodes = [repo.changelog.tip()]
237
237
238 for n in nodes:
238 for n in nodes:
239 hexnode = hgnode.hex(n)
239 hexnode = hgnode.hex(n)
240 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
240 ui.write(_("signing %d:%s\n") % (repo.changelog.rev(n),
241 hgnode.short(n)))
241 hgnode.short(n)))
242 # build data
242 # build data
243 data = node2txt(repo, n, sigver)
243 data = node2txt(repo, n, sigver)
244 sig = mygpg.sign(data)
244 sig = mygpg.sign(data)
245 if not sig:
245 if not sig:
246 raise util.Abort(_("error while signing"))
246 raise util.Abort(_("error while signing"))
247 sig = binascii.b2a_base64(sig)
247 sig = binascii.b2a_base64(sig)
248 sig = sig.replace("\n", "")
248 sig = sig.replace("\n", "")
249 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
249 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
250
250
251 # write it
251 # write it
252 if opts['local']:
252 if opts['local']:
253 repo.opener.append("localsigs", sigmessage)
253 repo.vfs.append("localsigs", sigmessage)
254 return
254 return
255
255
256 if not opts["force"]:
256 if not opts["force"]:
257 msigs = match.exact(repo.root, '', ['.hgsigs'])
257 msigs = match.exact(repo.root, '', ['.hgsigs'])
258 if util.any(repo.status(match=msigs, unknown=True, ignored=True)):
258 if util.any(repo.status(match=msigs, unknown=True, ignored=True)):
259 raise util.Abort(_("working copy of .hgsigs is changed "),
259 raise util.Abort(_("working copy of .hgsigs is changed "),
260 hint=_("please commit .hgsigs manually"))
260 hint=_("please commit .hgsigs manually"))
261
261
262 sigsfile = repo.wfile(".hgsigs", "ab")
262 sigsfile = repo.wfile(".hgsigs", "ab")
263 sigsfile.write(sigmessage)
263 sigsfile.write(sigmessage)
264 sigsfile.close()
264 sigsfile.close()
265
265
266 if '.hgsigs' not in repo.dirstate:
266 if '.hgsigs' not in repo.dirstate:
267 repo[None].add([".hgsigs"])
267 repo[None].add([".hgsigs"])
268
268
269 if opts["no_commit"]:
269 if opts["no_commit"]:
270 return
270 return
271
271
272 message = opts['message']
272 message = opts['message']
273 if not message:
273 if not message:
274 # we don't translate commit messages
274 # we don't translate commit messages
275 message = "\n".join(["Added signature for changeset %s"
275 message = "\n".join(["Added signature for changeset %s"
276 % hgnode.short(n)
276 % hgnode.short(n)
277 for n in nodes])
277 for n in nodes])
278 try:
278 try:
279 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
279 editor = cmdutil.getcommiteditor(editform='gpg.sign', **opts)
280 repo.commit(message, opts['user'], opts['date'], match=msigs,
280 repo.commit(message, opts['user'], opts['date'], match=msigs,
281 editor=editor)
281 editor=editor)
282 except ValueError, inst:
282 except ValueError, inst:
283 raise util.Abort(str(inst))
283 raise util.Abort(str(inst))
284
284
285 def shortkey(ui, key):
285 def shortkey(ui, key):
286 if len(key) != 16:
286 if len(key) != 16:
287 ui.debug("key ID \"%s\" format error\n" % key)
287 ui.debug("key ID \"%s\" format error\n" % key)
288 return key
288 return key
289
289
290 return key[-8:]
290 return key[-8:]
291
291
292 def node2txt(repo, node, ver):
292 def node2txt(repo, node, ver):
293 """map a manifest into some text"""
293 """map a manifest into some text"""
294 if ver == "0":
294 if ver == "0":
295 return "%s\n" % hgnode.hex(node)
295 return "%s\n" % hgnode.hex(node)
296 else:
296 else:
297 raise util.Abort(_("unknown signature version"))
297 raise util.Abort(_("unknown signature version"))
@@ -1,749 +1,749 b''
1 # keyword.py - $Keyword$ expansion for Mercurial
1 # keyword.py - $Keyword$ expansion for Mercurial
2 #
2 #
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
3 # Copyright 2007-2015 Christian Ebert <blacktrash@gmx.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 #
7 #
8 # $Id$
8 # $Id$
9 #
9 #
10 # Keyword expansion hack against the grain of a Distributed SCM
10 # Keyword expansion hack against the grain of a Distributed SCM
11 #
11 #
12 # There are many good reasons why this is not needed in a distributed
12 # There are many good reasons why this is not needed in a distributed
13 # SCM, still it may be useful in very small projects based on single
13 # SCM, still it may be useful in very small projects based on single
14 # files (like LaTeX packages), that are mostly addressed to an
14 # files (like LaTeX packages), that are mostly addressed to an
15 # audience not running a version control system.
15 # audience not running a version control system.
16 #
16 #
17 # For in-depth discussion refer to
17 # For in-depth discussion refer to
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
18 # <http://mercurial.selenic.com/wiki/KeywordPlan>.
19 #
19 #
20 # Keyword expansion is based on Mercurial's changeset template mappings.
20 # Keyword expansion is based on Mercurial's changeset template mappings.
21 #
21 #
22 # Binary files are not touched.
22 # Binary files are not touched.
23 #
23 #
24 # Files to act upon/ignore are specified in the [keyword] section.
24 # Files to act upon/ignore are specified in the [keyword] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
25 # Customized keyword template mappings in the [keywordmaps] section.
26 #
26 #
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
27 # Run "hg help keyword" and "hg kwdemo" to get info on configuration.
28
28
29 '''expand keywords in tracked files
29 '''expand keywords in tracked files
30
30
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
31 This extension expands RCS/CVS-like or self-customized $Keywords$ in
32 tracked text files selected by your configuration.
32 tracked text files selected by your configuration.
33
33
34 Keywords are only expanded in local repositories and not stored in the
34 Keywords are only expanded in local repositories and not stored in the
35 change history. The mechanism can be regarded as a convenience for the
35 change history. The mechanism can be regarded as a convenience for the
36 current user or for archive distribution.
36 current user or for archive distribution.
37
37
38 Keywords expand to the changeset data pertaining to the latest change
38 Keywords expand to the changeset data pertaining to the latest change
39 relative to the working directory parent of each file.
39 relative to the working directory parent of each file.
40
40
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
41 Configuration is done in the [keyword], [keywordset] and [keywordmaps]
42 sections of hgrc files.
42 sections of hgrc files.
43
43
44 Example::
44 Example::
45
45
46 [keyword]
46 [keyword]
47 # expand keywords in every python file except those matching "x*"
47 # expand keywords in every python file except those matching "x*"
48 **.py =
48 **.py =
49 x* = ignore
49 x* = ignore
50
50
51 [keywordset]
51 [keywordset]
52 # prefer svn- over cvs-like default keywordmaps
52 # prefer svn- over cvs-like default keywordmaps
53 svn = True
53 svn = True
54
54
55 .. note::
55 .. note::
56
56
57 The more specific you are in your filename patterns the less you
57 The more specific you are in your filename patterns the less you
58 lose speed in huge repositories.
58 lose speed in huge repositories.
59
59
60 For [keywordmaps] template mapping and expansion demonstration and
60 For [keywordmaps] template mapping and expansion demonstration and
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
61 control run :hg:`kwdemo`. See :hg:`help templates` for a list of
62 available templates and filters.
62 available templates and filters.
63
63
64 Three additional date template filters are provided:
64 Three additional date template filters are provided:
65
65
66 :``utcdate``: "2006/09/18 15:13:13"
66 :``utcdate``: "2006/09/18 15:13:13"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
67 :``svnutcdate``: "2006-09-18 15:13:13Z"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
68 :``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
69
69
70 The default template mappings (view with :hg:`kwdemo -d`) can be
70 The default template mappings (view with :hg:`kwdemo -d`) can be
71 replaced with customized keywords and templates. Again, run
71 replaced with customized keywords and templates. Again, run
72 :hg:`kwdemo` to control the results of your configuration changes.
72 :hg:`kwdemo` to control the results of your configuration changes.
73
73
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
74 Before changing/disabling active keywords, you must run :hg:`kwshrink`
75 to avoid storing expanded keywords in the change history.
75 to avoid storing expanded keywords in the change history.
76
76
77 To force expansion after enabling it, or a configuration change, run
77 To force expansion after enabling it, or a configuration change, run
78 :hg:`kwexpand`.
78 :hg:`kwexpand`.
79
79
80 Expansions spanning more than one line and incremental expansions,
80 Expansions spanning more than one line and incremental expansions,
81 like CVS' $Log$, are not supported. A keyword template map "Log =
81 like CVS' $Log$, are not supported. A keyword template map "Log =
82 {desc}" expands to the first line of the changeset description.
82 {desc}" expands to the first line of the changeset description.
83 '''
83 '''
84
84
85 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
85 from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
86 from mercurial import localrepo, match, patch, templatefilters, templater, util
86 from mercurial import localrepo, match, patch, templatefilters, templater, util
87 from mercurial import scmutil, pathutil
87 from mercurial import scmutil, pathutil
88 from mercurial.hgweb import webcommands
88 from mercurial.hgweb import webcommands
89 from mercurial.i18n import _
89 from mercurial.i18n import _
90 import os, re, tempfile
90 import os, re, tempfile
91
91
92 cmdtable = {}
92 cmdtable = {}
93 command = cmdutil.command(cmdtable)
93 command = cmdutil.command(cmdtable)
94 testedwith = 'internal'
94 testedwith = 'internal'
95
95
96 # hg commands that do not act on keywords
96 # hg commands that do not act on keywords
97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
97 nokwcommands = ('add addremove annotate bundle export grep incoming init log'
98 ' outgoing push tip verify convert email glog')
98 ' outgoing push tip verify convert email glog')
99
99
100 # hg commands that trigger expansion only when writing to working dir,
100 # hg commands that trigger expansion only when writing to working dir,
101 # not when reading filelog, and unexpand when reading from working dir
101 # not when reading filelog, and unexpand when reading from working dir
102 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
102 restricted = ('merge kwexpand kwshrink record qrecord resolve transplant'
103 ' unshelve rebase graft backout histedit fetch')
103 ' unshelve rebase graft backout histedit fetch')
104
104
105 # names of extensions using dorecord
105 # names of extensions using dorecord
106 recordextensions = 'record'
106 recordextensions = 'record'
107
107
108 colortable = {
108 colortable = {
109 'kwfiles.enabled': 'green bold',
109 'kwfiles.enabled': 'green bold',
110 'kwfiles.deleted': 'cyan bold underline',
110 'kwfiles.deleted': 'cyan bold underline',
111 'kwfiles.enabledunknown': 'green',
111 'kwfiles.enabledunknown': 'green',
112 'kwfiles.ignored': 'bold',
112 'kwfiles.ignored': 'bold',
113 'kwfiles.ignoredunknown': 'none'
113 'kwfiles.ignoredunknown': 'none'
114 }
114 }
115
115
116 # date like in cvs' $Date
116 # date like in cvs' $Date
117 def utcdate(text):
117 def utcdate(text):
118 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
118 ''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
119 '''
119 '''
120 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
120 return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
121 # date like in svn's $Date
121 # date like in svn's $Date
122 def svnisodate(text):
122 def svnisodate(text):
123 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
123 ''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
124 +0200 (Tue, 18 Aug 2009)".
124 +0200 (Tue, 18 Aug 2009)".
125 '''
125 '''
126 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
126 return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
127 # date like in svn's $Id
127 # date like in svn's $Id
128 def svnutcdate(text):
128 def svnutcdate(text):
129 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
129 ''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
130 11:00:13Z".
130 11:00:13Z".
131 '''
131 '''
132 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
132 return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
133
133
134 templatefilters.filters.update({'utcdate': utcdate,
134 templatefilters.filters.update({'utcdate': utcdate,
135 'svnisodate': svnisodate,
135 'svnisodate': svnisodate,
136 'svnutcdate': svnutcdate})
136 'svnutcdate': svnutcdate})
137
137
138 # make keyword tools accessible
138 # make keyword tools accessible
139 kwtools = {'templater': None, 'hgcmd': ''}
139 kwtools = {'templater': None, 'hgcmd': ''}
140
140
141 def _defaultkwmaps(ui):
141 def _defaultkwmaps(ui):
142 '''Returns default keywordmaps according to keywordset configuration.'''
142 '''Returns default keywordmaps according to keywordset configuration.'''
143 templates = {
143 templates = {
144 'Revision': '{node|short}',
144 'Revision': '{node|short}',
145 'Author': '{author|user}',
145 'Author': '{author|user}',
146 }
146 }
147 kwsets = ({
147 kwsets = ({
148 'Date': '{date|utcdate}',
148 'Date': '{date|utcdate}',
149 'RCSfile': '{file|basename},v',
149 'RCSfile': '{file|basename},v',
150 'RCSFile': '{file|basename},v', # kept for backwards compatibility
150 'RCSFile': '{file|basename},v', # kept for backwards compatibility
151 # with hg-keyword
151 # with hg-keyword
152 'Source': '{root}/{file},v',
152 'Source': '{root}/{file},v',
153 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
153 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
154 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
154 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
155 }, {
155 }, {
156 'Date': '{date|svnisodate}',
156 'Date': '{date|svnisodate}',
157 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
157 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
158 'LastChangedRevision': '{node|short}',
158 'LastChangedRevision': '{node|short}',
159 'LastChangedBy': '{author|user}',
159 'LastChangedBy': '{author|user}',
160 'LastChangedDate': '{date|svnisodate}',
160 'LastChangedDate': '{date|svnisodate}',
161 })
161 })
162 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
162 templates.update(kwsets[ui.configbool('keywordset', 'svn')])
163 return templates
163 return templates
164
164
165 def _shrinktext(text, subfunc):
165 def _shrinktext(text, subfunc):
166 '''Helper for keyword expansion removal in text.
166 '''Helper for keyword expansion removal in text.
167 Depending on subfunc also returns number of substitutions.'''
167 Depending on subfunc also returns number of substitutions.'''
168 return subfunc(r'$\1$', text)
168 return subfunc(r'$\1$', text)
169
169
170 def _preselect(wstatus, changed):
170 def _preselect(wstatus, changed):
171 '''Retrieves modified and added files from a working directory state
171 '''Retrieves modified and added files from a working directory state
172 and returns the subset of each contained in given changed files
172 and returns the subset of each contained in given changed files
173 retrieved from a change context.'''
173 retrieved from a change context.'''
174 modified = [f for f in wstatus.modified if f in changed]
174 modified = [f for f in wstatus.modified if f in changed]
175 added = [f for f in wstatus.added if f in changed]
175 added = [f for f in wstatus.added if f in changed]
176 return modified, added
176 return modified, added
177
177
178
178
class kwtemplater(object):
    '''
    Sets up keyword templates, corresponding keyword regex, and
    provides keyword substitution functions.
    '''

    def __init__(self, ui, repo, inc, exc):
        # inc/exc: include/exclude patterns collected from the [keyword]
        # configuration section by reposetup().
        self.ui = ui
        self.repo = repo
        self.match = match.match(repo.root, '', [], inc, exc)
        # restricted mode: the currently running hg command must see
        # unexpanded keywords (e.g. diff/export style commands)
        self.restrict = kwtools['hgcmd'] in restricted.split()
        self.postcommit = False

        kwmaps = self.ui.configitems('keywordmaps')
        if kwmaps: # override default templates
            self.templates = dict((k, templater.parsestring(v, False))
                                  for k, v in kwmaps)
        else:
            self.templates = _defaultkwmaps(self.ui)

    @util.propertycache
    def escape(self):
        '''Returns bar-separated and escaped keywords.'''
        return '|'.join(map(re.escape, self.templates.keys()))

    @util.propertycache
    def rekw(self):
        '''Returns regex for unexpanded keywords.'''
        return re.compile(r'\$(%s)\$' % self.escape)

    @util.propertycache
    def rekwexp(self):
        '''Returns regex for expanded keywords.'''
        return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)

    def substitute(self, data, path, ctx, subfunc):
        '''Replaces keywords in data with expanded template.'''
        def kwsub(mobj):
            kw = mobj.group(1)
            ct = cmdutil.changeset_templater(self.ui, self.repo, False, None,
                                             self.templates[kw], '', False)
            self.ui.pushbuffer()
            ct.show(ctx, root=self.repo.root, file=path)
            # only the first line of the rendered template is used
            ekw = templatefilters.firstline(self.ui.popbuffer())
            return '$%s: %s $' % (kw, ekw)
        return subfunc(kwsub, data)

    def linkctx(self, path, fileid):
        '''Similar to filelog.linkrev, but returns a changectx.'''
        return self.repo.filectx(path, fileid=fileid).changectx()

    def expand(self, path, node, data):
        '''Returns data with keywords expanded.'''
        # no expansion in restricted mode, for unmatched files, or for
        # binary data
        if not self.restrict and self.match(path) and not util.binary(data):
            ctx = self.linkctx(path, node)
            return self.substitute(data, path, ctx, self.rekw.sub)
        return data

    def iskwfile(self, cand, ctx):
        '''Returns subset of candidates which are configured for keyword
        expansion but are not symbolic links.'''
        return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]

    def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
        '''Overwrites selected files expanding/shrinking keywords.

        lookup: look up each file's own changectx via linkctx()
                (set by kwexpand/kwshrink and rollback).
        expand: expand keywords when True, shrink them otherwise.
        rekw: force use of the unexpanded-keyword regex.
        '''
        if self.restrict or lookup or self.postcommit: # exclude kw_copy
            candidates = self.iskwfile(candidates, ctx)
        if not candidates:
            return
        kwcmd = self.restrict and lookup # kwexpand/kwshrink
        # manifest only needed when reading from the store or looking up
        # per-file change contexts
        if self.restrict or expand and lookup:
            mf = ctx.manifest()
        if self.restrict or rekw:
            re_kw = self.rekw
        else:
            re_kw = self.rekwexp
        if expand:
            msg = _('overwriting %s expanding keywords\n')
        else:
            msg = _('overwriting %s shrinking keywords\n')
        for f in candidates:
            if self.restrict:
                data = self.repo.file(f).read(mf[f])
            else:
                data = self.repo.wread(f)
            if util.binary(data):
                continue
            if expand:
                parents = ctx.parents()
                if lookup:
                    ctx = self.linkctx(f, mf[f])
                elif self.restrict and len(parents) > 1:
                    # merge commit
                    # in case of conflict f is in modified state during
                    # merge, even if f does not differ from f in parent
                    for p in parents:
                        if f in p and not p[f].cmp(ctx[f]):
                            ctx = p[f].changectx()
                            break
                data, found = self.substitute(data, f, ctx, re_kw.subn)
            elif self.restrict:
                # in restricted mode a hit is enough; data stays unexpanded
                found = re_kw.search(data)
            else:
                data, found = _shrinktext(data, re_kw.subn)
            if found:
                self.ui.note(msg % f)
                fp = self.repo.wopener(f, "wb", atomictemp=True)
                fp.write(data)
                fp.close()
                if kwcmd:
                    self.repo.dirstate.normal(f)
                elif self.postcommit:
                    self.repo.dirstate.normallookup(f)

    def shrink(self, fname, text):
        '''Returns text with all keyword substitutions removed.'''
        if self.match(fname) and not util.binary(text):
            return _shrinktext(text, self.rekwexp.sub)
        return text

    def shrinklines(self, fname, lines):
        '''Returns lines with keyword substitutions removed.'''
        if self.match(fname):
            text = ''.join(lines)
            if not util.binary(text):
                return _shrinktext(text, self.rekwexp.sub).splitlines(True)
        return lines

    def wread(self, fname, data):
        '''If in restricted mode returns data read from wdir with
        keyword substitutions removed.'''
        if self.restrict:
            return self.shrink(fname, data)
        return data
313
313
class kwfilelog(filelog.filelog):
    '''
    Filelog subclass hooking the read, add and cmp methods.
    Keywords are "stored" unexpanded, and processed on reading.
    '''
    def __init__(self, opener, kwt, path):
        super(kwfilelog, self).__init__(opener, path)
        self.kwt = kwt
        self.path = path

    def read(self, node):
        '''Expands keywords when reading filelog.'''
        data = super(kwfilelog, self).read(node)
        # rename metadata must not be touched by expansion
        if not self.renamed(node):
            data = self.kwt.expand(self.path, node, data)
        return data

    def add(self, text, meta, tr, link, p1=None, p2=None):
        '''Removes keyword substitutions when adding to filelog.'''
        shrunk = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).add(shrunk, meta, tr, link, p1, p2)

    def cmp(self, node, text):
        '''Removes keyword substitutions for comparison.'''
        shrunk = self.kwt.shrink(self.path, text)
        return super(kwfilelog, self).cmp(node, shrunk)
340
340
def _status(ui, repo, wctx, kwt, *pats, **opts):
    '''Bails out if [keyword] configuration is not active.
    Returns status of working directory.'''
    if not kwt:
        # no active templater: complain with the most helpful message
        if ui.configitems('keyword'):
            raise util.Abort(_('[keyword] patterns cannot match'))
        raise util.Abort(_('no [keyword] patterns configured'))
    showunknown = opts.get('unknown') or opts.get('all')
    return repo.status(match=scmutil.match(wctx, pats, opts),
                       clean=True, unknown=showunknown)
350
350
def _kwfwrite(ui, repo, expand, *pats, **opts):
    '''Selects clean files and passes them to kwtemplater.overwrite.'''
    wctx = repo[None]
    if len(wctx.parents()) > 1:
        raise util.Abort(_('outstanding uncommitted merge'))
    kwt = kwtools['templater']
    wlock = repo.wlock()
    try:
        wstatus = _status(ui, repo, wctx, kwt, *pats, **opts)
        dirty = (wstatus.modified or wstatus.added or
                 wstatus.removed or wstatus.deleted)
        if dirty:
            raise util.Abort(_('outstanding uncommitted changes'))
        kwt.overwrite(wctx, wstatus.clean, True, expand)
    finally:
        wlock.release()
365
365
@command('kwdemo',
    [('d', 'default', None, _('show default keyword template maps')),
     ('f', 'rcfile', '',
      _('read maps from rcfile'), _('FILE'))],
    _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'),
    optionalrepo=True)
def demo(ui, repo, *args, **opts):
    '''print [keywordmaps] configuration and an expansion example

    Show current, custom, or default keyword template maps and their
    expansions.

    Extend the current configuration by specifying maps as arguments
    and using -f/--rcfile to source an external hgrc file.

    Use -d/--default to disable current configuration.

    See :hg:`help templates` for information on templates and filters.
    '''
    def demoitems(section, items):
        # print a config section in hgrc syntax
        ui.write('[%s]\n' % section)
        for k, v in sorted(items):
            ui.write('%s = %s\n' % (k, v))

    fn = 'demo.txt'
    # the demo runs in a throwaway repository so the user's repo is untouched
    tmpdir = tempfile.mkdtemp('', 'kwdemo.')
    ui.note(_('creating temporary repository at %s\n') % tmpdir)
    repo = localrepo.localrepository(repo.baseui, tmpdir, True)
    ui.setconfig('keyword', fn, '', 'keyword')
    svn = ui.configbool('keywordset', 'svn')
    # explicitly set keywordset for demo output
    ui.setconfig('keywordset', 'svn', svn, 'keyword')

    uikwmaps = ui.configitems('keywordmaps')
    if args or opts.get('rcfile'):
        ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
        if uikwmaps:
            ui.status(_('\textending current template maps\n'))
        if opts.get('default') or not uikwmaps:
            if svn:
                ui.status(_('\toverriding default svn keywordset\n'))
            else:
                ui.status(_('\toverriding default cvs keywordset\n'))
        if opts.get('rcfile'):
            ui.readconfig(opts.get('rcfile'))
        if args:
            # simulate hgrc parsing
            rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
            fp = repo.vfs('hgrc', 'w')
            fp.writelines(rcmaps)
            fp.close()
            ui.readconfig(repo.join('hgrc'))
        kwmaps = dict(ui.configitems('keywordmaps'))
    elif opts.get('default'):
        if svn:
            ui.status(_('\n\tconfiguration using default svn keywordset\n'))
        else:
            ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
        kwmaps = _defaultkwmaps(ui)
        if uikwmaps:
            ui.status(_('\tdisabling current template maps\n'))
            for k, v in kwmaps.iteritems():
                ui.setconfig('keywordmaps', k, v, 'keyword')
    else:
        ui.status(_('\n\tconfiguration using current keyword template maps\n'))
        if uikwmaps:
            kwmaps = dict(uikwmaps)
        else:
            kwmaps = _defaultkwmaps(ui)

    # activate the extension inside the demo repository
    uisetup(ui)
    reposetup(ui, repo)
    ui.write('[extensions]\nkeyword =\n')
    demoitems('keyword', ui.configitems('keyword'))
    demoitems('keywordset', ui.configitems('keywordset'))
    demoitems('keywordmaps', kwmaps.iteritems())
    keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
    repo.wopener.write(fn, keywords)
    repo[None].add([fn])
    ui.note(_('\nkeywords written to %s:\n') % fn)
    ui.note(keywords)
    wlock = repo.wlock()
    try:
        repo.dirstate.setbranch('demobranch')
    finally:
        wlock.release()
    # disable commit hooks so the demo commit cannot trigger user hooks
    for name, cmd in ui.configitems('hooks'):
        if name.split('.', 1)[0].find('commit') > -1:
            repo.ui.setconfig('hooks', name, '', 'keyword')
    msg = _('hg keyword configuration and expansion example')
    ui.note(("hg ci -m '%s'\n" % msg))
    repo.commit(text=msg)
    ui.status(_('\n\tkeywords expanded\n'))
    ui.write(repo.wread(fn))
    # clean up the temporary repository files
    for root, dirs, files in os.walk(tmpdir):
        for f in files:
            util.unlinkpath(repo.vfs.reljoin(root, f))
463
463
@command('kwexpand', commands.walkopts,
    _('hg kwexpand [OPTION]... [FILE]...'),
    inferrepo=True)
def expand(ui, repo, *pats, **opts):
    '''expand keywords in the working directory

    Run after (re)enabling keyword expansion.

    kwexpand refuses to run if given files contain local changes.
    '''
    # delegate with expansion switched on
    _kwfwrite(ui, repo, True, *pats, **opts)
477
477
@command('kwfiles',
    [('A', 'all', None, _('show keyword status flags of all files')),
     ('i', 'ignore', None, _('show files excluded from expansion')),
     ('u', 'unknown', None, _('only show unknown (not tracked) files')),
    ] + commands.walkopts,
    _('hg kwfiles [OPTION]... [FILE]...'),
    inferrepo=True)
def files(ui, repo, *pats, **opts):
    '''show files configured for keyword expansion

    List which files in the working directory are matched by the
    [keyword] configuration patterns.

    Useful to prevent inadvertent keyword expansion and to speed up
    execution by including only files that are actual candidates for
    expansion.

    See :hg:`help keyword` on how to construct patterns both for
    inclusion and exclusion of files.

    With -A/--all and -v/--verbose the codes used to show the status
    of files are::

      K = keyword expansion candidate
      k = keyword expansion candidate (not tracked)
      I = ignored
      i = ignored (not tracked)
    '''
    kwt = kwtools['templater']
    wctx = repo[None]
    status = _status(ui, repo, wctx, kwt, *pats, **opts)
    cwd = pats and repo.getcwd() or ''
    files = []
    # tracked candidates unless only unknown files were requested
    if not opts.get('unknown') or opts.get('all'):
        files = sorted(status.modified + status.added + status.clean)
    kwfiles = kwt.iskwfile(files, wctx)
    kwdeleted = kwt.iskwfile(status.deleted, wctx)
    kwunknown = kwt.iskwfile(status.unknown, wctx)
    if not opts.get('ignore') or opts.get('all'):
        showfiles = kwfiles, kwdeleted, kwunknown
    else:
        showfiles = [], [], []
    if opts.get('all') or opts.get('ignore'):
        # append the non-candidates (ignored tracked/untracked files)
        showfiles += ([f for f in files if f not in kwfiles],
                      [f for f in status.unknown if f not in kwunknown])
    kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
    kwstates = zip(kwlabels, 'K!kIi', showfiles)
    fm = ui.formatter('kwfiles', opts)
    # '%.0s' consumes the status character without printing it; only
    # -A/--all or -v/--verbose make the flag visible
    fmt = '%.0s%s\n'
    if opts.get('all') or ui.verbose:
        fmt = '%s %s\n'
    for kwstate, char, filenames in kwstates:
        label = 'kwfiles.' + kwstate
        for f in filenames:
            fm.startitem()
            fm.write('kwstatus path', fmt, char,
                     repo.pathto(f, cwd), label=label)
    fm.end()
536
536
@command('kwshrink', commands.walkopts,
    _('hg kwshrink [OPTION]... [FILE]...'),
    inferrepo=True)
def shrink(ui, repo, *pats, **opts):
    '''revert expanded keywords in the working directory

    Must be run before changing/disabling active keywords.

    kwshrink refuses to run if given files contain local changes.
    '''
    # delegate with expansion switched off
    _kwfwrite(ui, repo, False, *pats, **opts)
550
550
551
551
def uisetup(ui):
    '''Monkeypatches dispatch._parse to retrieve user command.'''

    def _kwparse(orig, ui, args):
        '''Wrapper around dispatch._parse recording the running hg command.'''
        parsed = orig(ui, args)
        # parsed is (cmd, func, args, options, cmdoptions); remember cmd
        # so reposetup/kwtemplater can decide on restricted mode
        kwtools['hgcmd'] = parsed[0]
        return parsed

    extensions.wrapfunction(dispatch, '_parse', _kwparse)
562
562
563 def reposetup(ui, repo):
563 def reposetup(ui, repo):
564 '''Sets up repo as kwrepo for keyword substitution.
564 '''Sets up repo as kwrepo for keyword substitution.
565 Overrides file method to return kwfilelog instead of filelog
565 Overrides file method to return kwfilelog instead of filelog
566 if file matches user configuration.
566 if file matches user configuration.
567 Wraps commit to overwrite configured files with updated
567 Wraps commit to overwrite configured files with updated
568 keyword substitutions.
568 keyword substitutions.
569 Monkeypatches patch and webcommands.'''
569 Monkeypatches patch and webcommands.'''
570
570
571 try:
571 try:
572 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
572 if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
573 or '.hg' in util.splitpath(repo.root)
573 or '.hg' in util.splitpath(repo.root)
574 or repo._url.startswith('bundle:')):
574 or repo._url.startswith('bundle:')):
575 return
575 return
576 except AttributeError:
576 except AttributeError:
577 pass
577 pass
578
578
579 inc, exc = [], ['.hg*']
579 inc, exc = [], ['.hg*']
580 for pat, opt in ui.configitems('keyword'):
580 for pat, opt in ui.configitems('keyword'):
581 if opt != 'ignore':
581 if opt != 'ignore':
582 inc.append(pat)
582 inc.append(pat)
583 else:
583 else:
584 exc.append(pat)
584 exc.append(pat)
585 if not inc:
585 if not inc:
586 return
586 return
587
587
588 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
588 kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
589
589
590 class kwrepo(repo.__class__):
590 class kwrepo(repo.__class__):
591 def file(self, f):
591 def file(self, f):
592 if f[0] == '/':
592 if f[0] == '/':
593 f = f[1:]
593 f = f[1:]
594 return kwfilelog(self.sopener, kwt, f)
594 return kwfilelog(self.sopener, kwt, f)
595
595
596 def wread(self, filename):
596 def wread(self, filename):
597 data = super(kwrepo, self).wread(filename)
597 data = super(kwrepo, self).wread(filename)
598 return kwt.wread(filename, data)
598 return kwt.wread(filename, data)
599
599
600 def commit(self, *args, **opts):
600 def commit(self, *args, **opts):
601 # use custom commitctx for user commands
601 # use custom commitctx for user commands
602 # other extensions can still wrap repo.commitctx directly
602 # other extensions can still wrap repo.commitctx directly
603 self.commitctx = self.kwcommitctx
603 self.commitctx = self.kwcommitctx
604 try:
604 try:
605 return super(kwrepo, self).commit(*args, **opts)
605 return super(kwrepo, self).commit(*args, **opts)
606 finally:
606 finally:
607 del self.commitctx
607 del self.commitctx
608
608
609 def kwcommitctx(self, ctx, error=False):
609 def kwcommitctx(self, ctx, error=False):
610 n = super(kwrepo, self).commitctx(ctx, error)
610 n = super(kwrepo, self).commitctx(ctx, error)
611 # no lock needed, only called from repo.commit() which already locks
611 # no lock needed, only called from repo.commit() which already locks
612 if not kwt.postcommit:
612 if not kwt.postcommit:
613 restrict = kwt.restrict
613 restrict = kwt.restrict
614 kwt.restrict = True
614 kwt.restrict = True
615 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
615 kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
616 False, True)
616 False, True)
617 kwt.restrict = restrict
617 kwt.restrict = restrict
618 return n
618 return n
619
619
620 def rollback(self, dryrun=False, force=False):
620 def rollback(self, dryrun=False, force=False):
621 wlock = self.wlock()
621 wlock = self.wlock()
622 try:
622 try:
623 if not dryrun:
623 if not dryrun:
624 changed = self['.'].files()
624 changed = self['.'].files()
625 ret = super(kwrepo, self).rollback(dryrun, force)
625 ret = super(kwrepo, self).rollback(dryrun, force)
626 if not dryrun:
626 if not dryrun:
627 ctx = self['.']
627 ctx = self['.']
628 modified, added = _preselect(ctx.status(), changed)
628 modified, added = _preselect(ctx.status(), changed)
629 kwt.overwrite(ctx, modified, True, True)
629 kwt.overwrite(ctx, modified, True, True)
630 kwt.overwrite(ctx, added, True, False)
630 kwt.overwrite(ctx, added, True, False)
631 return ret
631 return ret
632 finally:
632 finally:
633 wlock.release()
633 wlock.release()
634
634
635 # monkeypatches
635 # monkeypatches
636 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
636 def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
637 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
637 '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
638 rejects or conflicts due to expanded keywords in working dir.'''
638 rejects or conflicts due to expanded keywords in working dir.'''
639 orig(self, ui, gp, backend, store, eolmode)
639 orig(self, ui, gp, backend, store, eolmode)
640 # shrink keywords read from working dir
640 # shrink keywords read from working dir
641 self.lines = kwt.shrinklines(self.fname, self.lines)
641 self.lines = kwt.shrinklines(self.fname, self.lines)
642
642
643 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
643 def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
644 opts=None, prefix=''):
644 opts=None, prefix=''):
645 '''Monkeypatch patch.diff to avoid expansion.'''
645 '''Monkeypatch patch.diff to avoid expansion.'''
646 kwt.restrict = True
646 kwt.restrict = True
647 return orig(repo, node1, node2, match, changes, opts, prefix)
647 return orig(repo, node1, node2, match, changes, opts, prefix)
648
648
649 def kwweb_skip(orig, web, req, tmpl):
649 def kwweb_skip(orig, web, req, tmpl):
650 '''Wraps webcommands.x turning off keyword expansion.'''
650 '''Wraps webcommands.x turning off keyword expansion.'''
651 kwt.match = util.never
651 kwt.match = util.never
652 return orig(web, req, tmpl)
652 return orig(web, req, tmpl)
653
653
654 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
654 def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
655 '''Wraps cmdutil.amend expanding keywords after amend.'''
655 '''Wraps cmdutil.amend expanding keywords after amend.'''
656 wlock = repo.wlock()
656 wlock = repo.wlock()
657 try:
657 try:
658 kwt.postcommit = True
658 kwt.postcommit = True
659 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
659 newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
660 if newid != old.node():
660 if newid != old.node():
661 ctx = repo[newid]
661 ctx = repo[newid]
662 kwt.restrict = True
662 kwt.restrict = True
663 kwt.overwrite(ctx, ctx.files(), False, True)
663 kwt.overwrite(ctx, ctx.files(), False, True)
664 kwt.restrict = False
664 kwt.restrict = False
665 return newid
665 return newid
666 finally:
666 finally:
667 wlock.release()
667 wlock.release()
668
668
669 def kw_copy(orig, ui, repo, pats, opts, rename=False):
669 def kw_copy(orig, ui, repo, pats, opts, rename=False):
670 '''Wraps cmdutil.copy so that copy/rename destinations do not
670 '''Wraps cmdutil.copy so that copy/rename destinations do not
671 contain expanded keywords.
671 contain expanded keywords.
672 Note that the source of a regular file destination may also be a
672 Note that the source of a regular file destination may also be a
673 symlink:
673 symlink:
674 hg cp sym x -> x is symlink
674 hg cp sym x -> x is symlink
675 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
675 cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
676 For the latter we have to follow the symlink to find out whether its
676 For the latter we have to follow the symlink to find out whether its
677 target is configured for expansion and we therefore must unexpand the
677 target is configured for expansion and we therefore must unexpand the
678 keywords in the destination.'''
678 keywords in the destination.'''
679 wlock = repo.wlock()
679 wlock = repo.wlock()
680 try:
680 try:
681 orig(ui, repo, pats, opts, rename)
681 orig(ui, repo, pats, opts, rename)
682 if opts.get('dry_run'):
682 if opts.get('dry_run'):
683 return
683 return
684 wctx = repo[None]
684 wctx = repo[None]
685 cwd = repo.getcwd()
685 cwd = repo.getcwd()
686
686
687 def haskwsource(dest):
687 def haskwsource(dest):
688 '''Returns true if dest is a regular file and configured for
688 '''Returns true if dest is a regular file and configured for
689 expansion or a symlink which points to a file configured for
689 expansion or a symlink which points to a file configured for
690 expansion. '''
690 expansion. '''
691 source = repo.dirstate.copied(dest)
691 source = repo.dirstate.copied(dest)
692 if 'l' in wctx.flags(source):
692 if 'l' in wctx.flags(source):
693 source = pathutil.canonpath(repo.root, cwd,
693 source = pathutil.canonpath(repo.root, cwd,
694 os.path.realpath(source))
694 os.path.realpath(source))
695 return kwt.match(source)
695 return kwt.match(source)
696
696
697 candidates = [f for f in repo.dirstate.copies() if
697 candidates = [f for f in repo.dirstate.copies() if
698 'l' not in wctx.flags(f) and haskwsource(f)]
698 'l' not in wctx.flags(f) and haskwsource(f)]
699 kwt.overwrite(wctx, candidates, False, False)
699 kwt.overwrite(wctx, candidates, False, False)
700 finally:
700 finally:
701 wlock.release()
701 wlock.release()
702
702
703 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
703 def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
704 '''Wraps record.dorecord expanding keywords after recording.'''
704 '''Wraps record.dorecord expanding keywords after recording.'''
705 wlock = repo.wlock()
705 wlock = repo.wlock()
706 try:
706 try:
707 # record returns 0 even when nothing has changed
707 # record returns 0 even when nothing has changed
708 # therefore compare nodes before and after
708 # therefore compare nodes before and after
709 kwt.postcommit = True
709 kwt.postcommit = True
710 ctx = repo['.']
710 ctx = repo['.']
711 wstatus = ctx.status()
711 wstatus = ctx.status()
712 ret = orig(ui, repo, commitfunc, *pats, **opts)
712 ret = orig(ui, repo, commitfunc, *pats, **opts)
713 recctx = repo['.']
713 recctx = repo['.']
714 if ctx != recctx:
714 if ctx != recctx:
715 modified, added = _preselect(wstatus, recctx.files())
715 modified, added = _preselect(wstatus, recctx.files())
716 kwt.restrict = False
716 kwt.restrict = False
717 kwt.overwrite(recctx, modified, False, True)
717 kwt.overwrite(recctx, modified, False, True)
718 kwt.overwrite(recctx, added, False, True, True)
718 kwt.overwrite(recctx, added, False, True, True)
719 kwt.restrict = True
719 kwt.restrict = True
720 return ret
720 return ret
721 finally:
721 finally:
722 wlock.release()
722 wlock.release()
723
723
724 def kwfilectx_cmp(orig, self, fctx):
724 def kwfilectx_cmp(orig, self, fctx):
725 # keyword affects data size, comparing wdir and filelog size does
725 # keyword affects data size, comparing wdir and filelog size does
726 # not make sense
726 # not make sense
727 if (fctx._filerev is None and
727 if (fctx._filerev is None and
728 (self._repo._encodefilterpats or
728 (self._repo._encodefilterpats or
729 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
729 kwt.match(fctx.path()) and 'l' not in fctx.flags() or
730 self.size() - 4 == fctx.size()) or
730 self.size() - 4 == fctx.size()) or
731 self.size() == fctx.size()):
731 self.size() == fctx.size()):
732 return self._filelog.cmp(self._filenode, fctx.data())
732 return self._filelog.cmp(self._filenode, fctx.data())
733 return True
733 return True
734
734
735 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
735 extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
736 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
736 extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
737 extensions.wrapfunction(patch, 'diff', kw_diff)
737 extensions.wrapfunction(patch, 'diff', kw_diff)
738 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
738 extensions.wrapfunction(cmdutil, 'amend', kw_amend)
739 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
739 extensions.wrapfunction(cmdutil, 'copy', kw_copy)
740 for c in 'annotate changeset rev filediff diff'.split():
740 for c in 'annotate changeset rev filediff diff'.split():
741 extensions.wrapfunction(webcommands, c, kwweb_skip)
741 extensions.wrapfunction(webcommands, c, kwweb_skip)
742 for name in recordextensions.split():
742 for name in recordextensions.split():
743 try:
743 try:
744 record = extensions.find(name)
744 record = extensions.find(name)
745 extensions.wrapfunction(record, 'dorecord', kw_dorecord)
745 extensions.wrapfunction(record, 'dorecord', kw_dorecord)
746 except KeyError:
746 except KeyError:
747 pass
747 pass
748
748
749 repo.__class__ = kwrepo
749 repo.__class__ = kwrepo
@@ -1,3552 +1,3552 b''
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behaviour can be configured with::
31 files creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from mercurial.i18n import _
65 from mercurial.i18n import _
66 from mercurial.node import bin, hex, short, nullid, nullrev
66 from mercurial.node import bin, hex, short, nullid, nullrev
67 from mercurial.lock import release
67 from mercurial.lock import release
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
68 from mercurial import commands, cmdutil, hg, scmutil, util, revset
69 from mercurial import extensions, error, phases
69 from mercurial import extensions, error, phases
70 from mercurial import patch as patchmod
70 from mercurial import patch as patchmod
71 from mercurial import localrepo
71 from mercurial import localrepo
72 from mercurial import subrepo
72 from mercurial import subrepo
73 import os, re, errno, shutil
73 import os, re, errno, shutil
74
74
75 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
75 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
76
76
77 cmdtable = {}
77 cmdtable = {}
78 command = cmdutil.command(cmdtable)
78 command = cmdutil.command(cmdtable)
79 testedwith = 'internal'
79 testedwith = 'internal'
80
80
81 # force load strip extension formerly included in mq and import some utility
81 # force load strip extension formerly included in mq and import some utility
82 try:
82 try:
83 stripext = extensions.find('strip')
83 stripext = extensions.find('strip')
84 except KeyError:
84 except KeyError:
85 # note: load is lazy so we could avoid the try-except,
85 # note: load is lazy so we could avoid the try-except,
86 # but I (marmoute) prefer this explicit code.
86 # but I (marmoute) prefer this explicit code.
87 class dummyui(object):
87 class dummyui(object):
88 def debug(self, msg):
88 def debug(self, msg):
89 pass
89 pass
90 stripext = extensions.load(dummyui(), 'strip', '')
90 stripext = extensions.load(dummyui(), 'strip', '')
91
91
92 strip = stripext.strip
92 strip = stripext.strip
93 checksubstate = stripext.checksubstate
93 checksubstate = stripext.checksubstate
94 checklocalchanges = stripext.checklocalchanges
94 checklocalchanges = stripext.checklocalchanges
95
95
96
96
97 # Patch names looks like unix-file names.
97 # Patch names looks like unix-file names.
98 # They must be joinable with queue directory and result in the patch path.
98 # They must be joinable with queue directory and result in the patch path.
99 normname = util.normpath
99 normname = util.normpath
100
100
101 class statusentry(object):
101 class statusentry(object):
102 def __init__(self, node, name):
102 def __init__(self, node, name):
103 self.node, self.name = node, name
103 self.node, self.name = node, name
104 def __repr__(self):
104 def __repr__(self):
105 return hex(self.node) + ':' + self.name
105 return hex(self.node) + ':' + self.name
106
106
107 # The order of the headers in 'hg export' HG patches:
107 # The order of the headers in 'hg export' HG patches:
108 HGHEADERS = [
108 HGHEADERS = [
109 # '# HG changeset patch',
109 # '# HG changeset patch',
110 '# User ',
110 '# User ',
111 '# Date ',
111 '# Date ',
112 '# ',
112 '# ',
113 '# Branch ',
113 '# Branch ',
114 '# Node ID ',
114 '# Node ID ',
115 '# Parent ', # can occur twice for merges - but that is not relevant for mq
115 '# Parent ', # can occur twice for merges - but that is not relevant for mq
116 ]
116 ]
117 # The order of headers in plain 'mail style' patches:
117 # The order of headers in plain 'mail style' patches:
118 PLAINHEADERS = {
118 PLAINHEADERS = {
119 'from': 0,
119 'from': 0,
120 'date': 1,
120 'date': 1,
121 'subject': 2,
121 'subject': 2,
122 }
122 }
123
123
124 def inserthgheader(lines, header, value):
124 def inserthgheader(lines, header, value):
125 """Assuming lines contains a HG patch header, add a header line with value.
125 """Assuming lines contains a HG patch header, add a header line with value.
126 >>> try: inserthgheader([], '# Date ', 'z')
126 >>> try: inserthgheader([], '# Date ', 'z')
127 ... except ValueError, inst: print "oops"
127 ... except ValueError, inst: print "oops"
128 oops
128 oops
129 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
129 >>> inserthgheader(['# HG changeset patch'], '# Date ', 'z')
130 ['# HG changeset patch', '# Date z']
130 ['# HG changeset patch', '# Date z']
131 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
131 >>> inserthgheader(['# HG changeset patch', ''], '# Date ', 'z')
132 ['# HG changeset patch', '# Date z', '']
132 ['# HG changeset patch', '# Date z', '']
133 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
133 >>> inserthgheader(['# HG changeset patch', '# User y'], '# Date ', 'z')
134 ['# HG changeset patch', '# User y', '# Date z']
134 ['# HG changeset patch', '# User y', '# Date z']
135 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
135 >>> inserthgheader(['# HG changeset patch', '# Date x', '# User y'],
136 ... '# User ', 'z')
136 ... '# User ', 'z')
137 ['# HG changeset patch', '# Date x', '# User z']
137 ['# HG changeset patch', '# Date x', '# User z']
138 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
138 >>> inserthgheader(['# HG changeset patch', '# Date y'], '# Date ', 'z')
139 ['# HG changeset patch', '# Date z']
139 ['# HG changeset patch', '# Date z']
140 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
140 >>> inserthgheader(['# HG changeset patch', '', '# Date y'], '# Date ', 'z')
141 ['# HG changeset patch', '# Date z', '', '# Date y']
141 ['# HG changeset patch', '# Date z', '', '# Date y']
142 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
142 >>> inserthgheader(['# HG changeset patch', '# Parent y'], '# Date ', 'z')
143 ['# HG changeset patch', '# Date z', '# Parent y']
143 ['# HG changeset patch', '# Date z', '# Parent y']
144 """
144 """
145 start = lines.index('# HG changeset patch') + 1
145 start = lines.index('# HG changeset patch') + 1
146 newindex = HGHEADERS.index(header)
146 newindex = HGHEADERS.index(header)
147 bestpos = len(lines)
147 bestpos = len(lines)
148 for i in range(start, len(lines)):
148 for i in range(start, len(lines)):
149 line = lines[i]
149 line = lines[i]
150 if not line.startswith('# '):
150 if not line.startswith('# '):
151 bestpos = min(bestpos, i)
151 bestpos = min(bestpos, i)
152 break
152 break
153 for lineindex, h in enumerate(HGHEADERS):
153 for lineindex, h in enumerate(HGHEADERS):
154 if line.startswith(h):
154 if line.startswith(h):
155 if lineindex == newindex:
155 if lineindex == newindex:
156 lines[i] = header + value
156 lines[i] = header + value
157 return lines
157 return lines
158 if lineindex > newindex:
158 if lineindex > newindex:
159 bestpos = min(bestpos, i)
159 bestpos = min(bestpos, i)
160 break # next line
160 break # next line
161 lines.insert(bestpos, header + value)
161 lines.insert(bestpos, header + value)
162 return lines
162 return lines
163
163
164 def insertplainheader(lines, header, value):
164 def insertplainheader(lines, header, value):
165 """For lines containing a plain patch header, add a header line with value.
165 """For lines containing a plain patch header, add a header line with value.
166 >>> insertplainheader([], 'Date', 'z')
166 >>> insertplainheader([], 'Date', 'z')
167 ['Date: z']
167 ['Date: z']
168 >>> insertplainheader([''], 'Date', 'z')
168 >>> insertplainheader([''], 'Date', 'z')
169 ['Date: z', '']
169 ['Date: z', '']
170 >>> insertplainheader(['x'], 'Date', 'z')
170 >>> insertplainheader(['x'], 'Date', 'z')
171 ['Date: z', '', 'x']
171 ['Date: z', '', 'x']
172 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
172 >>> insertplainheader(['From: y', 'x'], 'Date', 'z')
173 ['From: y', 'Date: z', '', 'x']
173 ['From: y', 'Date: z', '', 'x']
174 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
174 >>> insertplainheader([' date : x', ' from : y', ''], 'From', 'z')
175 [' date : x', 'From: z', '']
175 [' date : x', 'From: z', '']
176 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
176 >>> insertplainheader(['', 'Date: y'], 'Date', 'z')
177 ['Date: z', '', 'Date: y']
177 ['Date: z', '', 'Date: y']
178 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
178 >>> insertplainheader(['foo: bar', 'DATE: z', 'x'], 'From', 'y')
179 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
179 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
180 """
180 """
181 newprio = PLAINHEADERS[header.lower()]
181 newprio = PLAINHEADERS[header.lower()]
182 bestpos = len(lines)
182 bestpos = len(lines)
183 for i, line in enumerate(lines):
183 for i, line in enumerate(lines):
184 if ':' in line:
184 if ':' in line:
185 lheader = line.split(':', 1)[0].strip().lower()
185 lheader = line.split(':', 1)[0].strip().lower()
186 lprio = PLAINHEADERS.get(lheader, newprio + 1)
186 lprio = PLAINHEADERS.get(lheader, newprio + 1)
187 if lprio == newprio:
187 if lprio == newprio:
188 lines[i] = '%s: %s' % (header, value)
188 lines[i] = '%s: %s' % (header, value)
189 return lines
189 return lines
190 if lprio > newprio and i < bestpos:
190 if lprio > newprio and i < bestpos:
191 bestpos = i
191 bestpos = i
192 else:
192 else:
193 if line:
193 if line:
194 lines.insert(i, '')
194 lines.insert(i, '')
195 if i < bestpos:
195 if i < bestpos:
196 bestpos = i
196 bestpos = i
197 break
197 break
198 lines.insert(bestpos, '%s: %s' % (header, value))
198 lines.insert(bestpos, '%s: %s' % (header, value))
199 return lines
199 return lines
200
200
201 class patchheader(object):
201 class patchheader(object):
202 def __init__(self, pf, plainmode=False):
202 def __init__(self, pf, plainmode=False):
203 def eatdiff(lines):
203 def eatdiff(lines):
204 while lines:
204 while lines:
205 l = lines[-1]
205 l = lines[-1]
206 if (l.startswith("diff -") or
206 if (l.startswith("diff -") or
207 l.startswith("Index:") or
207 l.startswith("Index:") or
208 l.startswith("===========")):
208 l.startswith("===========")):
209 del lines[-1]
209 del lines[-1]
210 else:
210 else:
211 break
211 break
212 def eatempty(lines):
212 def eatempty(lines):
213 while lines:
213 while lines:
214 if not lines[-1].strip():
214 if not lines[-1].strip():
215 del lines[-1]
215 del lines[-1]
216 else:
216 else:
217 break
217 break
218
218
219 message = []
219 message = []
220 comments = []
220 comments = []
221 user = None
221 user = None
222 date = None
222 date = None
223 parent = None
223 parent = None
224 format = None
224 format = None
225 subject = None
225 subject = None
226 branch = None
226 branch = None
227 nodeid = None
227 nodeid = None
228 diffstart = 0
228 diffstart = 0
229
229
230 for line in file(pf):
230 for line in file(pf):
231 line = line.rstrip()
231 line = line.rstrip()
232 if (line.startswith('diff --git')
232 if (line.startswith('diff --git')
233 or (diffstart and line.startswith('+++ '))):
233 or (diffstart and line.startswith('+++ '))):
234 diffstart = 2
234 diffstart = 2
235 break
235 break
236 diffstart = 0 # reset
236 diffstart = 0 # reset
237 if line.startswith("--- "):
237 if line.startswith("--- "):
238 diffstart = 1
238 diffstart = 1
239 continue
239 continue
240 elif format == "hgpatch":
240 elif format == "hgpatch":
241 # parse values when importing the result of an hg export
241 # parse values when importing the result of an hg export
242 if line.startswith("# User "):
242 if line.startswith("# User "):
243 user = line[7:]
243 user = line[7:]
244 elif line.startswith("# Date "):
244 elif line.startswith("# Date "):
245 date = line[7:]
245 date = line[7:]
246 elif line.startswith("# Parent "):
246 elif line.startswith("# Parent "):
247 parent = line[9:].lstrip() # handle double trailing space
247 parent = line[9:].lstrip() # handle double trailing space
248 elif line.startswith("# Branch "):
248 elif line.startswith("# Branch "):
249 branch = line[9:]
249 branch = line[9:]
250 elif line.startswith("# Node ID "):
250 elif line.startswith("# Node ID "):
251 nodeid = line[10:]
251 nodeid = line[10:]
252 elif not line.startswith("# ") and line:
252 elif not line.startswith("# ") and line:
253 message.append(line)
253 message.append(line)
254 format = None
254 format = None
255 elif line == '# HG changeset patch':
255 elif line == '# HG changeset patch':
256 message = []
256 message = []
257 format = "hgpatch"
257 format = "hgpatch"
258 elif (format != "tagdone" and (line.startswith("Subject: ") or
258 elif (format != "tagdone" and (line.startswith("Subject: ") or
259 line.startswith("subject: "))):
259 line.startswith("subject: "))):
260 subject = line[9:]
260 subject = line[9:]
261 format = "tag"
261 format = "tag"
262 elif (format != "tagdone" and (line.startswith("From: ") or
262 elif (format != "tagdone" and (line.startswith("From: ") or
263 line.startswith("from: "))):
263 line.startswith("from: "))):
264 user = line[6:]
264 user = line[6:]
265 format = "tag"
265 format = "tag"
266 elif (format != "tagdone" and (line.startswith("Date: ") or
266 elif (format != "tagdone" and (line.startswith("Date: ") or
267 line.startswith("date: "))):
267 line.startswith("date: "))):
268 date = line[6:]
268 date = line[6:]
269 format = "tag"
269 format = "tag"
270 elif format == "tag" and line == "":
270 elif format == "tag" and line == "":
271 # when looking for tags (subject: from: etc) they
271 # when looking for tags (subject: from: etc) they
272 # end once you find a blank line in the source
272 # end once you find a blank line in the source
273 format = "tagdone"
273 format = "tagdone"
274 elif message or line:
274 elif message or line:
275 message.append(line)
275 message.append(line)
276 comments.append(line)
276 comments.append(line)
277
277
278 eatdiff(message)
278 eatdiff(message)
279 eatdiff(comments)
279 eatdiff(comments)
280 # Remember the exact starting line of the patch diffs before consuming
280 # Remember the exact starting line of the patch diffs before consuming
281 # empty lines, for external use by TortoiseHg and others
281 # empty lines, for external use by TortoiseHg and others
282 self.diffstartline = len(comments)
282 self.diffstartline = len(comments)
283 eatempty(message)
283 eatempty(message)
284 eatempty(comments)
284 eatempty(comments)
285
285
286 # make sure message isn't empty
286 # make sure message isn't empty
287 if format and format.startswith("tag") and subject:
287 if format and format.startswith("tag") and subject:
288 message.insert(0, subject)
288 message.insert(0, subject)
289
289
290 self.message = message
290 self.message = message
291 self.comments = comments
291 self.comments = comments
292 self.user = user
292 self.user = user
293 self.date = date
293 self.date = date
294 self.parent = parent
294 self.parent = parent
295 # nodeid and branch are for external use by TortoiseHg and others
295 # nodeid and branch are for external use by TortoiseHg and others
296 self.nodeid = nodeid
296 self.nodeid = nodeid
297 self.branch = branch
297 self.branch = branch
298 self.haspatch = diffstart > 1
298 self.haspatch = diffstart > 1
299 self.plainmode = (plainmode or
299 self.plainmode = (plainmode or
300 '# HG changeset patch' not in self.comments and
300 '# HG changeset patch' not in self.comments and
301 util.any(c.startswith('Date: ') or
301 util.any(c.startswith('Date: ') or
302 c.startswith('From: ')
302 c.startswith('From: ')
303 for c in self.comments))
303 for c in self.comments))
304
304
305 def setuser(self, user):
305 def setuser(self, user):
306 try:
306 try:
307 inserthgheader(self.comments, '# User ', user)
307 inserthgheader(self.comments, '# User ', user)
308 except ValueError:
308 except ValueError:
309 if self.plainmode:
309 if self.plainmode:
310 insertplainheader(self.comments, 'From', user)
310 insertplainheader(self.comments, 'From', user)
311 else:
311 else:
312 tmp = ['# HG changeset patch', '# User ' + user]
312 tmp = ['# HG changeset patch', '# User ' + user]
313 self.comments = tmp + self.comments
313 self.comments = tmp + self.comments
314 self.user = user
314 self.user = user
315
315
316 def setdate(self, date):
316 def setdate(self, date):
317 try:
317 try:
318 inserthgheader(self.comments, '# Date ', date)
318 inserthgheader(self.comments, '# Date ', date)
319 except ValueError:
319 except ValueError:
320 if self.plainmode:
320 if self.plainmode:
321 insertplainheader(self.comments, 'Date', date)
321 insertplainheader(self.comments, 'Date', date)
322 else:
322 else:
323 tmp = ['# HG changeset patch', '# Date ' + date]
323 tmp = ['# HG changeset patch', '# Date ' + date]
324 self.comments = tmp + self.comments
324 self.comments = tmp + self.comments
325 self.date = date
325 self.date = date
326
326
327 def setparent(self, parent):
327 def setparent(self, parent):
328 try:
328 try:
329 inserthgheader(self.comments, '# Parent ', parent)
329 inserthgheader(self.comments, '# Parent ', parent)
330 except ValueError:
330 except ValueError:
331 if not self.plainmode:
331 if not self.plainmode:
332 tmp = ['# HG changeset patch', '# Parent ' + parent]
332 tmp = ['# HG changeset patch', '# Parent ' + parent]
333 self.comments = tmp + self.comments
333 self.comments = tmp + self.comments
334 self.parent = parent
334 self.parent = parent
335
335
336 def setmessage(self, message):
336 def setmessage(self, message):
337 if self.comments:
337 if self.comments:
338 self._delmsg()
338 self._delmsg()
339 self.message = [message]
339 self.message = [message]
340 if message:
340 if message:
341 if self.plainmode and self.comments and self.comments[-1]:
341 if self.plainmode and self.comments and self.comments[-1]:
342 self.comments.append('')
342 self.comments.append('')
343 self.comments.append(message)
343 self.comments.append(message)
344
344
345 def __str__(self):
345 def __str__(self):
346 s = '\n'.join(self.comments).rstrip()
346 s = '\n'.join(self.comments).rstrip()
347 if not s:
347 if not s:
348 return ''
348 return ''
349 return s + '\n\n'
349 return s + '\n\n'
350
350
351 def _delmsg(self):
351 def _delmsg(self):
352 '''Remove existing message, keeping the rest of the comments fields.
352 '''Remove existing message, keeping the rest of the comments fields.
353 If comments contains 'subject: ', message will prepend
353 If comments contains 'subject: ', message will prepend
354 the field and a blank line.'''
354 the field and a blank line.'''
355 if self.message:
355 if self.message:
356 subj = 'subject: ' + self.message[0].lower()
356 subj = 'subject: ' + self.message[0].lower()
357 for i in xrange(len(self.comments)):
357 for i in xrange(len(self.comments)):
358 if subj == self.comments[i].lower():
358 if subj == self.comments[i].lower():
359 del self.comments[i]
359 del self.comments[i]
360 self.message = self.message[2:]
360 self.message = self.message[2:]
361 break
361 break
362 ci = 0
362 ci = 0
363 for mi in self.message:
363 for mi in self.message:
364 while mi != self.comments[ci]:
364 while mi != self.comments[ci]:
365 ci += 1
365 ci += 1
366 del self.comments[ci]
366 del self.comments[ci]
367
367
368 def newcommit(repo, phase, *args, **kwargs):
368 def newcommit(repo, phase, *args, **kwargs):
369 """helper dedicated to ensure a commit respect mq.secret setting
369 """helper dedicated to ensure a commit respect mq.secret setting
370
370
371 It should be used instead of repo.commit inside the mq source for operation
371 It should be used instead of repo.commit inside the mq source for operation
372 creating new changeset.
372 creating new changeset.
373 """
373 """
374 repo = repo.unfiltered()
374 repo = repo.unfiltered()
375 if phase is None:
375 if phase is None:
376 if repo.ui.configbool('mq', 'secret', False):
376 if repo.ui.configbool('mq', 'secret', False):
377 phase = phases.secret
377 phase = phases.secret
378 if phase is not None:
378 if phase is not None:
379 backup = repo.ui.backupconfig('phases', 'new-commit')
379 backup = repo.ui.backupconfig('phases', 'new-commit')
380 try:
380 try:
381 if phase is not None:
381 if phase is not None:
382 repo.ui.setconfig('phases', 'new-commit', phase, 'mq')
382 repo.ui.setconfig('phases', 'new-commit', phase, 'mq')
383 return repo.commit(*args, **kwargs)
383 return repo.commit(*args, **kwargs)
384 finally:
384 finally:
385 if phase is not None:
385 if phase is not None:
386 repo.ui.restoreconfig(backup)
386 repo.ui.restoreconfig(backup)
387
387
388 class AbortNoCleanup(error.Abort):
388 class AbortNoCleanup(error.Abort):
389 pass
389 pass
390
390
391 class queue(object):
391 class queue(object):
    def __init__(self, ui, baseui, path, patchdir=None):
        """Initialize a patch queue rooted at *path*.

        The active queue name is read from the 'patches.queue' file
        under *path* (defaulting to the 'patches' directory); an
        explicit *patchdir* overrides the computed patch directory.
        """
        self.basepath = path
        try:
            fh = open(os.path.join(path, 'patches.queue'))
            cur = fh.read().rstrip()
            fh.close()
            if not cur:
                curpath = os.path.join(path, 'patches')
            else:
                curpath = os.path.join(path, 'patches-' + cur)
        except IOError:
            # no patches.queue file: fall back to the default queue
            curpath = os.path.join(path, 'patches')
        self.path = patchdir or curpath
        self.opener = scmutil.opener(self.path)
        self.ui = ui
        self.baseui = baseui
        # dirty flags record which state files savedirty() must rewrite
        self.applieddirty = False
        self.seriesdirty = False
        self.added = []
        # names of the queue's state files, relative to self.path
        self.seriespath = "series"
        self.statuspath = "status"
        self.guardspath = "guards"
        self.activeguards = None
        self.guardsdirty = False
        # Handle mq.git as a bool with extended values
        try:
            gitmode = ui.configbool('mq', 'git', None)
            if gitmode is None:
                raise error.ConfigError
            self.gitmode = gitmode and 'yes' or 'no'
        except error.ConfigError:
            # not a plain boolean: keep the raw string (e.g. auto/keep)
            self.gitmode = ui.config('mq', 'git', 'auto').lower()
        self.plainmode = ui.configbool('mq', 'plain', False)
        self.checkapplied = True
426
426
    @util.propertycache
    def applied(self):
        """List of statusentry objects for the applied patches, parsed
        lazily from the status file (empty if the file is missing)."""
        def parselines(lines):
            # each status line is '<hex node>:<patch name>'
            for l in lines:
                entry = l.split(':', 1)
                if len(entry) > 1:
                    n, name = entry
                    yield statusentry(bin(n), name)
                elif l.strip():
                    self.ui.warn(_('malformated mq status line: %s\n') % entry)
                # else we ignore empty lines
        try:
            lines = self.opener.read(self.statuspath).splitlines()
            return list(parselines(lines))
        except IOError, e:
            if e.errno == errno.ENOENT:
                # no status file: no patches are applied
                return []
            raise
445
445
    @util.propertycache
    def fullseries(self):
        """Raw lines of the series file, including comments and guard
        annotations; empty list if the file does not exist."""
        try:
            return self.opener.read(self.seriespath).splitlines()
        except IOError, e:
            if e.errno == errno.ENOENT:
                return []
            raise
454
454
    @util.propertycache
    def series(self):
        """Patch names from the series file (no comments or guards)."""
        # parseseries() assigns self.series (and self.seriesguards) as
        # plain instance attributes, so the return reads the fresh value
        self.parseseries()
        return self.series
459
459
    @util.propertycache
    def seriesguards(self):
        """Per-patch guard lists, parallel to self.series."""
        # parseseries() assigns self.seriesguards as a plain instance
        # attribute, so the return reads the fresh value
        self.parseseries()
        return self.seriesguards
464
464
465 def invalidate(self):
465 def invalidate(self):
466 for a in 'applied fullseries series seriesguards'.split():
466 for a in 'applied fullseries series seriesguards'.split():
467 if a in self.__dict__:
467 if a in self.__dict__:
468 delattr(self, a)
468 delattr(self, a)
469 self.applieddirty = False
469 self.applieddirty = False
470 self.seriesdirty = False
470 self.seriesdirty = False
471 self.guardsdirty = False
471 self.guardsdirty = False
472 self.activeguards = None
472 self.activeguards = None
473
473
474 def diffopts(self, opts={}, patchfn=None):
474 def diffopts(self, opts={}, patchfn=None):
475 diffopts = patchmod.diffopts(self.ui, opts)
475 diffopts = patchmod.diffopts(self.ui, opts)
476 if self.gitmode == 'auto':
476 if self.gitmode == 'auto':
477 diffopts.upgrade = True
477 diffopts.upgrade = True
478 elif self.gitmode == 'keep':
478 elif self.gitmode == 'keep':
479 pass
479 pass
480 elif self.gitmode in ('yes', 'no'):
480 elif self.gitmode in ('yes', 'no'):
481 diffopts.git = self.gitmode == 'yes'
481 diffopts.git = self.gitmode == 'yes'
482 else:
482 else:
483 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
483 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
484 ' got %s') % self.gitmode)
484 ' got %s') % self.gitmode)
485 if patchfn:
485 if patchfn:
486 diffopts = self.patchopts(diffopts, patchfn)
486 diffopts = self.patchopts(diffopts, patchfn)
487 return diffopts
487 return diffopts
488
488
489 def patchopts(self, diffopts, *patches):
489 def patchopts(self, diffopts, *patches):
490 """Return a copy of input diff options with git set to true if
490 """Return a copy of input diff options with git set to true if
491 referenced patch is a git patch and should be preserved as such.
491 referenced patch is a git patch and should be preserved as such.
492 """
492 """
493 diffopts = diffopts.copy()
493 diffopts = diffopts.copy()
494 if not diffopts.git and self.gitmode == 'keep':
494 if not diffopts.git and self.gitmode == 'keep':
495 for patchfn in patches:
495 for patchfn in patches:
496 patchf = self.opener(patchfn, 'r')
496 patchf = self.opener(patchfn, 'r')
497 # if the patch was a git patch, refresh it as a git patch
497 # if the patch was a git patch, refresh it as a git patch
498 for line in patchf:
498 for line in patchf:
499 if line.startswith('diff --git'):
499 if line.startswith('diff --git'):
500 diffopts.git = True
500 diffopts.git = True
501 break
501 break
502 patchf.close()
502 patchf.close()
503 return diffopts
503 return diffopts
504
504
505 def join(self, *p):
505 def join(self, *p):
506 return os.path.join(self.path, *p)
506 return os.path.join(self.path, *p)
507
507
508 def findseries(self, patch):
508 def findseries(self, patch):
509 def matchpatch(l):
509 def matchpatch(l):
510 l = l.split('#', 1)[0]
510 l = l.split('#', 1)[0]
511 return l.strip() == patch
511 return l.strip() == patch
512 for index, l in enumerate(self.fullseries):
512 for index, l in enumerate(self.fullseries):
513 if matchpatch(l):
513 if matchpatch(l):
514 return index
514 return index
515 return None
515 return None
516
516
    # matches a trailing guard annotation in a series line, e.g.
    # " #+foo" or " #-bar"; group 1 captures the signed guard name
    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
518
518
    def parseseries(self):
        """Parse self.fullseries into self.series (patch names) and
        self.seriesguards (guard list per patch).

        Raises util.Abort if a patch name appears more than once.
        """
        self.series = []
        self.seriesguards = []
        for l in self.fullseries:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                # whole line is a comment
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.seriespath)))
                self.series.append(patch)
                self.seriesguards.append(self.guard_re.findall(comment))
539
539
540 def checkguard(self, guard):
540 def checkguard(self, guard):
541 if not guard:
541 if not guard:
542 return _('guard cannot be an empty string')
542 return _('guard cannot be an empty string')
543 bad_chars = '# \t\r\n\f'
543 bad_chars = '# \t\r\n\f'
544 first = guard[0]
544 first = guard[0]
545 if first in '-+':
545 if first in '-+':
546 return (_('guard %r starts with invalid character: %r') %
546 return (_('guard %r starts with invalid character: %r') %
547 (guard, first))
547 (guard, first))
548 for c in bad_chars:
548 for c in bad_chars:
549 if c in guard:
549 if c in guard:
550 return _('invalid character in guard %r: %r') % (guard, c)
550 return _('invalid character in guard %r: %r') % (guard, c)
551
551
552 def setactive(self, guards):
552 def setactive(self, guards):
553 for guard in guards:
553 for guard in guards:
554 bad = self.checkguard(guard)
554 bad = self.checkguard(guard)
555 if bad:
555 if bad:
556 raise util.Abort(bad)
556 raise util.Abort(bad)
557 guards = sorted(set(guards))
557 guards = sorted(set(guards))
558 self.ui.debug('active guards: %s\n' % ' '.join(guards))
558 self.ui.debug('active guards: %s\n' % ' '.join(guards))
559 self.activeguards = guards
559 self.activeguards = guards
560 self.guardsdirty = True
560 self.guardsdirty = True
561
561
    def active(self):
        """Return the list of active guards, reading the guards file on
        first use; invalid entries are warned about and skipped."""
        if self.activeguards is None:
            self.activeguards = []
            try:
                guards = self.opener.read(self.guardspath).split()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                # missing guards file means no guards are active
                guards = []
            for i, guard in enumerate(guards):
                bad = self.checkguard(guard)
                if bad:
                    # report file:line for each rejected guard
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guardspath), i + 1, bad))
                else:
                    self.activeguards.append(guard)
        return self.activeguards
579
579
    def setguards(self, idx, guards):
        """Replace the guards of the series entry at *idx*.

        Each guard must be of the form '+name' or '-name'; raises
        util.Abort for an invalid guard.
        """
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.checkguard(g[1:])
            if bad:
                raise util.Abort(bad)
        # strip existing guard annotations, then append the new ones
        drop = self.guard_re.sub('', self.fullseries[idx])
        self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
        self.parseseries()
        self.seriesdirty = True
593
593
594 def pushable(self, idx):
594 def pushable(self, idx):
595 if isinstance(idx, str):
595 if isinstance(idx, str):
596 idx = self.series.index(idx)
596 idx = self.series.index(idx)
597 patchguards = self.seriesguards[idx]
597 patchguards = self.seriesguards[idx]
598 if not patchguards:
598 if not patchguards:
599 return True, None
599 return True, None
600 guards = self.active()
600 guards = self.active()
601 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
601 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
602 if exactneg:
602 if exactneg:
603 return False, repr(exactneg[0])
603 return False, repr(exactneg[0])
604 pos = [g for g in patchguards if g[0] == '+']
604 pos = [g for g in patchguards if g[0] == '+']
605 exactpos = [g for g in pos if g[1:] in guards]
605 exactpos = [g for g in pos if g[1:] in guards]
606 if pos:
606 if pos:
607 if exactpos:
607 if exactpos:
608 return True, repr(exactpos[0])
608 return True, repr(exactpos[0])
609 return False, ' '.join(map(repr, pos))
609 return False, ' '.join(map(repr, pos))
610 return True, ''
610 return True, ''
611
611
    def explainpushable(self, idx, all_patches=False):
        """Explain on the ui why the patch at *idx* is pushable or not.

        With all_patches=True, messages go to ui.write and pushable
        patches are explained as well; otherwise only blocked patches
        are reported (via ui.warn) and only in verbose mode.
        """
        # bound methods are truthy, so this and/or selects the writer
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %s\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %s\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
636
636
    def savedirty(self):
        """Write out whichever state files are dirty (status, series,
        guards) and register newly added patch files with the patch
        repository."""
        def writelist(items, path):
            # rewrite *path* with one item per line
            fp = self.opener(path, 'w')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applieddirty:
            writelist(map(str, self.applied), self.statuspath)
            self.applieddirty = False
        if self.seriesdirty:
            writelist(self.fullseries, self.seriespath)
            self.seriesdirty = False
        if self.guardsdirty:
            writelist(self.activeguards, self.guardspath)
            self.guardsdirty = False
        if self.added:
            qrepo = self.qrepo()
            if qrepo:
                # only add files not already tracked by the patch repo
                qrepo[None].add(f for f in self.added if f not in qrepo[None])
            self.added = []
657
657
658 def removeundo(self, repo):
658 def removeundo(self, repo):
659 undo = repo.sjoin('undo')
659 undo = repo.sjoin('undo')
660 if not os.path.exists(undo):
660 if not os.path.exists(undo):
661 return
661 return
662 try:
662 try:
663 os.unlink(undo)
663 os.unlink(undo)
664 except OSError, inst:
664 except OSError, inst:
665 self.ui.warn(_('error removing undo: %s\n') % str(inst))
665 self.ui.warn(_('error removing undo: %s\n') % str(inst))
666
666
667 def backup(self, repo, files, copy=False):
667 def backup(self, repo, files, copy=False):
668 # backup local changes in --force case
668 # backup local changes in --force case
669 for f in sorted(files):
669 for f in sorted(files):
670 absf = repo.wjoin(f)
670 absf = repo.wjoin(f)
671 if os.path.lexists(absf):
671 if os.path.lexists(absf):
672 self.ui.note(_('saving current version of %s as %s\n') %
672 self.ui.note(_('saving current version of %s as %s\n') %
673 (f, f + '.orig'))
673 (f, f + '.orig'))
674 if copy:
674 if copy:
675 util.copyfile(absf, absf + '.orig')
675 util.copyfile(absf, absf + '.orig')
676 else:
676 else:
677 util.rename(absf, absf + '.orig')
677 util.rename(absf, absf + '.orig')
678
678
679 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
679 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
680 fp=None, changes=None, opts={}):
680 fp=None, changes=None, opts={}):
681 stat = opts.get('stat')
681 stat = opts.get('stat')
682 m = scmutil.match(repo[node1], files, opts)
682 m = scmutil.match(repo[node1], files, opts)
683 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
683 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
684 changes, stat, fp)
684 changes, stat, fp)
685
685
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Bring *patch* from *mergeq* onto *head*, merging with *rev*
        (its applied counterpart) when a plain apply fails.

        Returns (err, node); on the merge path the patch file is
        rewritten from the merged result.
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        # commit the merge reusing the original description and user
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise util.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise util.Abort(_("unable to read %s") % patch)

        # regenerate the patch file from the merged result
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = str(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
724
724
    def qparents(self, repo, rev=None):
        """return the mq handled parent or p1

        In some cases where mq gets itself to be the parent of a merge
        the appropriate parent may be p2.
        (eg: an in progress merge started with mq disabled)

        If no parent is managed by mq, p1 is returned.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                # not a merge: p1 is the answer
                return p1
            if not self.applied:
                return None
            # in-progress merge: the last applied patch is the mq parent
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1
745
745
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Merge every pushable patch in *series* from *mergeq*.

        Returns (err, head) where head is the resulting queue parent;
        err is nonzero when a patch is missing, unapplied, or fails to
        merge.
        """
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
784
784
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file

        Returns (success, files, fuzz): *files* is the list of files
        the patch touched and *fuzz* whether it applied with fuzz.'''
        files = set()
        try:
            fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
                                  files=files, eolmode=None)
            return (True, list(files), fuzz)
        except Exception, inst:
            # report the failure; full traceback only in verbose mode
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            self.ui.traceback()
            return (False, list(files), False)
799
799
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply *series* under wlock/lock inside a 'qpush' transaction.

        Delegates to _apply.  On success (or AbortNoCleanup) the
        transaction is committed and dirty queue state is saved; on any
        other exception the transaction is aborted and repo/queue
        caches are invalidated.  Returns _apply's (error, hash) pair,
        or (2, p1) when AbortNoCleanup was raised.
        """
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # keep what was already applied: commit, do not roll back
                tr.close()
                self.savedirty()
                return 2, repo.dirstate.p1()
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    repo.invalidate()
                    repo.dirstate.invalidate()
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
830
830
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None,
               tobackup=None, keepchanges=False):
        """returns (error, hash)

        error = 1 for unable to read, 2 for patch failed, 3 for patch
        fuzz. tobackup is None or a set of files to backup before they
        are modified by a patch.
        """
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            # skip patches blocked by guards
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explainpushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except IOError:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                if tobackup:
                    # save copies of files this patch will modify
                    touched = patchmod.changedfiles(self.ui, repo, pf)
                    touched = set(touched) & tobackup
                    if touched and keepchanges:
                        raise AbortNoCleanup(
                            _("local changes found, refresh first"))
                    self.backup(repo, touched, copy=True)
                    tobackup = tobackup - touched
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.beginparentchange()
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.setparents(p1, merge)
                repo.dirstate.endparentchange()

            if all_files and '.hgsubstate' in all_files:
                # the patch touched subrepo state: merge subrepos too
                wctx = repo[None]
                pctx = repo['.']
                overwrite = False
                mergedsubstate = subrepo.submerge(repo, pctx, wctx, wctx,
                    overwrite)
                files += mergedsubstate.keys()

            match = scmutil.matchfiles(repo, files or [])
            oldtip = repo['tip']
            n = newcommit(repo, None, message, ph.user, ph.date, match=match,
                          force=True)
            if repo['tip'] == oldtip:
                raise util.Abort(_("qpush exactly duplicates child changeset"))
            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
935
935
    def _cleanup(self, patches, numrevs, keep=False):
        """Remove *patches* from series/status, deleting their files
        unless *keep*; returns the nodes of the finished entries.

        numrevs is the number of leading applied entries being
        finished.  Raises util.Abort when unapplied unknown patches are
        named.
        """
        if not keep:
            r = self.qrepo()
            if r:
                r[None].forget(patches)
            for p in patches:
                try:
                    os.unlink(self.join(p))
                except OSError, inst:
                    # a missing patch file is fine; anything else is not
                    if inst.errno != errno.ENOENT:
                        raise

        qfinished = []
        if numrevs:
            qfinished = self.applied[:numrevs]
            del self.applied[:numrevs]
            self.applieddirty = True

        unknown = []

        # delete from highest index down so earlier deletions do not
        # shift the remaining indexes
        for (i, p) in sorted([(self.findseries(p), p) for p in patches],
                             reverse=True):
            if i is not None:
                del self.fullseries[i]
            else:
                unknown.append(p)

        if unknown:
            if numrevs:
                rev = dict((entry.name, entry.node) for entry in qfinished)
                for p in unknown:
                    msg = _('revision %s refers to unknown patches: %s\n')
                    self.ui.warn(msg % (short(rev[p]), p))
            else:
                msg = _('unknown patches: %s\n')
                raise util.Abort(''.join(msg % p for p in unknown))

        self.parseseries()
        self.seriesdirty = True
        return [entry.node for entry in qfinished]
976
976
def _revpatches(self, repo, revs):
    """Map a sorted list of revisions onto applied patch names.

    Aborts when a revision predates the first applied patch or does not
    line up one-to-one with the applied stack.  Emits a status note for
    changesets that still carry an auto-generated commit message.
    """
    firstrev = repo[self.applied[0].node].rev()
    patches = []
    for idx, rev in enumerate(revs):
        if rev < firstrev:
            raise util.Abort(_('revision %d is not managed') % rev)

        ctx = repo[rev]
        if ctx.node() != self.applied[idx].node:
            msg = _('cannot delete revision %d above applied patches')
            raise util.Abort(msg % rev)

        name = self.applied[idx].name
        # warn when the changeset message is still the mq boilerplate
        for fmt in ('[mq]: %s', 'imported patch %s'):
            if ctx.description() == fmt % name:
                msg = _('patch %s finalized without changeset message\n')
                repo.ui.status(msg % name)
                break

        patches.append(name)
    return patches
1000
1000
def finish(self, repo, revs):
    """Move the patches in *revs* out of mq control (qfinish).

    When the 'mq.secret' option is set, the finished changesets may be
    advanced to the configured 'phases.new-commit' phase.
    """
    # Touch the phase cache first so phasedefaults runs before the
    # patches are removed.
    repo._phasecache
    patches = self._revpatches(repo, sorted(revs))
    qfinished = self._cleanup(patches, len(patches))
    if not (qfinished and repo.ui.configbool('mq', 'secret', False)):
        return
    # only use this logic when the secret option is set
    oldqbase = repo[qfinished[0]]
    tphase = repo.ui.config('phases', 'new-commit', phases.draft)
    if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
        tr = repo.transaction('qfinish')
        try:
            phases.advanceboundary(repo, tr, tphase, qfinished)
            tr.close()
        finally:
            tr.release()
1018
1018
def delete(self, repo, patches, opts):
    """Remove patches from the series (the qdelete command).

    Patches may be named directly or selected via the 'rev' option;
    applied patches cannot be deleted.  With 'keep', the patch files
    are left on disk.
    """
    if not patches and not opts.get('rev'):
        raise util.Abort(_('qdelete requires at least one revision or '
                           'patch name'))

    realpatches = []
    for name in patches:
        name = self.lookup(name, strict=True)
        if self.isapplied(name):
            raise util.Abort(_("cannot delete applied patch %s") % name)
        if name not in self.series:
            raise util.Abort(_("patch %s not in series file") % name)
        if name not in realpatches:
            realpatches.append(name)

    numrevs = 0
    if opts.get('rev'):
        if not self.applied:
            raise util.Abort(_('no patches applied'))
        revs = scmutil.revrange(repo, opts.get('rev'))
        revs.sort()
        revpatches = self._revpatches(repo, revs)
        realpatches += revpatches
        numrevs = len(revpatches)

    self._cleanup(realpatches, numrevs, opts.get('keep'))
1046
1046
def checktoppatch(self, repo):
    '''check that working directory is at qtip'''
    # Returns (node, name) of the topmost applied patch, or
    # (None, None) when no patches are applied.
    if not self.applied:
        return None, None
    top = self.applied[-1]
    if repo.dirstate.p1() != top.node:
        raise util.Abort(_("working directory revision is not qtip"))
    return top.node, top.name
1056
1056
def putsubstate2changes(self, substatestate, changes):
    """Inject '.hgsubstate' into the right bucket of a status triple.

    *changes* is (modified, added, removed, ...); *substatestate* is
    the dirstate code of '.hgsubstate'.  No-op when the file is
    already listed in one of the first three buckets.
    """
    if any('.hgsubstate' in files for files in changes[:3]):
        return  # already listed up
    # not yet listed up; pick the bucket from the dirstate code
    if substatestate in 'a?':
        bucket = 1  # added (or unknown)
    elif substatestate in 'r':
        bucket = 2  # removed
    else:
        bucket = 0  # modified
    changes[bucket].append('.hgsubstate')
1068
1068
def checklocalchanges(self, repo, force=False, refresh=True):
    """Abort (unless *force*) when the working directory has changes.

    *refresh* only changes the wording of the abort message, hinting
    at 'qrefresh' rather than a plain commit.  Delegates to the
    module-level checklocalchanges helper.
    """
    suffix = ''
    if refresh:
        suffix = ', refresh first'
        # plain copies of the final messages so the i18n tool can
        # detect them
        _("local changes found, refresh first")
        _("local changed subrepos found, refresh first")
    return checklocalchanges(repo, force, suffix)
1077
1077
_reserved = ('series', 'status', 'guards', '.', '..')
def checkreservedname(self, name):
    """Abort when *name* is unusable as a patch name.

    Reserved file names, the '.hg'/'.mq' prefixes and the characters
    '#' and ':' are all rejected.
    """
    if name in self._reserved:
        raise util.Abort(_('"%s" cannot be used as the name of a patch')
                         % name)
    for prefix in ('.hg', '.mq'):
        if name.startswith(prefix):
            raise util.Abort(_('patch name cannot begin with "%s"')
                             % prefix)
    for forbidden in ('#', ':'):
        if forbidden in name:
            raise util.Abort(_('"%s" cannot be used in the name of a patch')
                             % forbidden)
1091
1091
def checkpatchname(self, name, force=False):
    """Validate *name* and, unless *force*, require it to be unused."""
    self.checkreservedname(name)
    target = self.join(name)
    if not force and os.path.exists(target):
        if os.path.isdir(target):
            raise util.Abort(_('"%s" already exists as a directory')
                             % name)
        raise util.Abort(_('patch "%s" already exists') % name)
1100
1100
def checkkeepchanges(self, keepchanges, force):
    """--force and --keep-changes are mutually exclusive."""
    if keepchanges and force:
        raise util.Abort(_('cannot use both --force and --keep-changes'))
1104
1104
def new(self, repo, patchfn, *pats, **opts):
    """Create a new patch *patchfn* on top of the stack (qnew).

    Commits the current local changes (optionally limited by *pats* /
    include / exclude), records the result in the applied stack and
    series file, and writes the patch header plus diff to the patch
    file.  On any failure the commit is rolled back and the partially
    written patch file is unlinked.

    options:
    msg: a string or a no-argument function returning a string
    edit/editform: invoke the commit editor (default form 'mq.qnew')
    user/date: author information for the patch header
    git: produce git-style diffs
    checkname: validate the patch name (default True)
    """
    msg = opts.get('msg')
    edit = opts.get('edit')
    editform = opts.get('editform', 'mq.qnew')
    user = opts.get('user')
    date = opts.get('date')
    if date:
        date = util.parsedate(date)
    diffopts = self.diffopts({'git': opts.get('git')})
    if opts.get('checkname', True):
        self.checkpatchname(patchfn)
    inclsubs = checksubstate(repo)
    if inclsubs:
        # remember the dirstate code of .hgsubstate so it can be
        # folded into the recorded changes below
        substatestate = repo.dirstate['.hgsubstate']
    if opts.get('include') or opts.get('exclude') or pats:
        match = scmutil.match(repo[None], pats, opts)
        # detect missing files in pats
        def badfn(f, msg):
            if f != '.hgsubstate': # .hgsubstate is auto-created
                raise util.Abort('%s: %s' % (f, msg))
        match.bad = badfn
        changes = repo.status(match=match)
    else:
        changes = self.checklocalchanges(repo, force=True)
    commitfiles = list(inclsubs)
    for files in changes[:3]:
        commitfiles.extend(files)
    match = scmutil.matchfiles(repo, commitfiles)
    if len(repo[None].parents()) > 1:
        raise util.Abort(_('cannot manage merge changesets'))
    self.checktoppatch(repo)
    insert = self.fullseriesend()
    wlock = repo.wlock()
    try:
        try:
            # if patch file write fails, abort early
            p = self.opener(patchfn, "w")
        except IOError, e:
            raise util.Abort(_('cannot write patch "%s": %s')
                             % (patchfn, e.strerror))
        try:
            defaultmsg = "[mq]: %s" % patchfn
            editor = cmdutil.getcommiteditor(editform=editform)
            if edit:
                def finishdesc(desc):
                    # fall back to the default message when the user
                    # leaves the editor empty
                    if desc.rstrip():
                        return desc
                    else:
                        return defaultmsg
                # i18n: this message is shown in editor with "HG: " prefix
                extramsg = _('Leave message empty to use default message.')
                editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                 extramsg=extramsg,
                                                 editform=editform)
                commitmsg = msg
            else:
                commitmsg = msg or defaultmsg

            n = newcommit(repo, None, commitmsg, user, date, match=match,
                          force=True, editor=editor)
            if n is None:
                raise util.Abort(_("repo commit failed"))
            try:
                # record the new patch in series/applied, then write
                # the patch header and diff to the patch file
                self.fullseries[insert:insert] = [patchfn]
                self.applied.append(statusentry(n, patchfn))
                self.parseseries()
                self.seriesdirty = True
                self.applieddirty = True
                nctx = repo[n]
                ph = patchheader(self.join(patchfn), self.plainmode)
                if user:
                    ph.setuser(user)
                if date:
                    ph.setdate('%s %s' % date)
                ph.setparent(hex(nctx.p1().node()))
                msg = nctx.description().strip()
                if msg == defaultmsg.strip():
                    # don't duplicate the auto-generated message
                    msg = ''
                ph.setmessage(msg)
                p.write(str(ph))
                if commitfiles:
                    parent = self.qparents(repo, n)
                    if inclsubs:
                        self.putsubstate2changes(substatestate, changes)
                    chunks = patchmod.diff(repo, node1=parent, node2=n,
                                           changes=changes, opts=diffopts)
                    for chunk in chunks:
                        p.write(chunk)
                p.close()
                r = self.qrepo()
                if r:
                    r[None].add([patchfn])
            except: # re-raises
                # undo the commit if anything after it failed
                repo.rollback()
                raise
        except Exception:
            # remove the partially written patch file on any failure
            patchpath = self.join(patchfn)
            try:
                os.unlink(patchpath)
            except OSError:
                self.ui.warn(_('error unlinking %s\n') % patchpath)
            raise
        self.removeundo(repo)
    finally:
        release(wlock)
1213
1213
def isapplied(self, patch):
    """Return (index, node, name) for an applied patch, else None."""
    for idx, entry in enumerate(self.applied):
        if entry.name == patch:
            return (idx, entry.node, entry.name)
    return None
1220
1220
# if the exact patch name does not exist, we try a few
# variations. If strict is passed, we try only #1
#
# 1) a number (as string) to indicate an offset in the series file
# 2) a unique substring of the patch name was given
# 3) patchname[-+]num to indicate an offset in the series file
def lookup(self, patch, strict=False):
    """Resolve a user-supplied patch identifier to a series name.

    Accepts an exact series name, a series offset given as an integer
    string, and -- unless *strict* -- a unique substring, the aliases
    'qtip'/'qbase', or 'name-N'/'name+N' offsets relative to another
    patch.  Aborts when nothing matches.
    """
    def partialname(s):
        # exact name wins immediately
        if s in self.series:
            return s
        candidates = [x for x in self.series if s in x]
        if len(candidates) > 1:
            self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
            for candidate in candidates:
                self.ui.warn(' %s\n' % candidate)
            return None
        if candidates:
            return candidates[0]
        if self.series and self.applied:
            if s == 'qtip':
                return self.series[self.seriesend(True) - 1]
            if s == 'qbase':
                return self.series[0]
        return None

    if patch in self.series:
        return patch

    if not os.path.isfile(self.join(patch)):
        # try the identifier as a series offset
        try:
            sno = int(patch)
        except (ValueError, OverflowError):
            pass
        else:
            if -len(self.series) <= sno < len(self.series):
                return self.series[sno]

        if not strict:
            found = partialname(patch)
            if found:
                return found
            minus = patch.rfind('-')
            if minus >= 0:
                found = partialname(patch[:minus])
                if found:
                    pos = self.series.index(found)
                    try:
                        offset = int(patch[minus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if pos - offset >= 0:
                            return self.series[pos - offset]
            plus = patch.rfind('+')
            if plus >= 0:
                found = partialname(patch[:plus])
                if found:
                    pos = self.series.index(found)
                    try:
                        offset = int(patch[plus + 1:] or 1)
                    except (ValueError, OverflowError):
                        pass
                    else:
                        if pos + offset < len(self.series):
                            return self.series[pos + offset]
    raise util.Abort(_("patch %s not in series") % patch)
1287
1287
def push(self, repo, patch=None, force=False, list=False, mergeq=None,
         all=False, move=False, exact=False, nobackup=False,
         keepchanges=False):
    """Apply the next patch(es) from the series (qpush).

    Without *patch*, pushes one patch; with *patch*, pushes up to and
    including it; with *all*, pushes the whole series.  *move*
    reorders the named patch to the front of the unapplied series
    first; *exact* updates the working directory to the patch's
    recorded parent before applying.  Returns 0 on success, 1 on
    "nothing to do"/guard errors, and the apply status otherwise.
    On an unexpected failure the working directory is reverted and
    files created while patching are removed.
    """
    self.checkkeepchanges(keepchanges, force)
    diffopts = self.diffopts()
    wlock = repo.wlock()
    try:
        heads = []
        for hs in repo.branchmap().itervalues():
            heads.extend(hs)
        if not heads:
            heads = [nullid]
        if repo.dirstate.p1() not in heads and not exact:
            self.ui.status(_("(working directory not at a head)\n"))

        if not self.series:
            self.ui.warn(_('no patches in series\n'))
            return 0

        # Suppose our series file is: A B C and the current 'top'
        # patch is B. qpush C should be performed (moving forward)
        # qpush B is a NOP (no change) qpush A is an error (can't
        # go backwards with qpush)
        if patch:
            patch = self.lookup(patch)
            info = self.isapplied(patch)
            if info and info[0] >= len(self.applied) - 1:
                self.ui.warn(
                    _('qpush: %s is already at the top\n') % patch)
                return 0

            pushable, reason = self.pushable(patch)
            if pushable:
                if self.series.index(patch) < self.seriesend():
                    raise util.Abort(
                        _("cannot push to a previous patch: %s") % patch)
            else:
                if reason:
                    reason = _('guarded by %s') % reason
                else:
                    reason = _('no matching guards')
                self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                return 1
        elif all:
            patch = self.series[-1]
            if self.isapplied(patch):
                self.ui.warn(_('all patches are currently applied\n'))
                return 0

        # Following the above example, starting at 'top' of B:
        # qpush should be performed (pushes C), but a subsequent
        # qpush without an argument is an error (nothing to
        # apply). This allows a loop of "...while hg qpush..." to
        # work as it detects an error when done
        start = self.seriesend()
        if start == len(self.series):
            self.ui.warn(_('patch series already fully applied\n'))
            return 1
        if not force and not keepchanges:
            self.checklocalchanges(repo, refresh=self.applied)

        if exact:
            if keepchanges:
                raise util.Abort(
                    _("cannot use --exact and --keep-changes together"))
            if move:
                raise util.Abort(_('cannot use --exact and --move '
                                   'together'))
            if self.applied:
                raise util.Abort(_('cannot push --exact with applied '
                                   'patches'))
            root = self.series[start]
            target = patchheader(self.join(root), self.plainmode).parent
            if not target:
                raise util.Abort(
                    _("%s does not have a parent recorded") % root)
            if not repo[target] == repo['.']:
                hg.update(repo, target)

        if move:
            if not patch:
                raise util.Abort(_("please specify the patch to move"))
            # locate the next unapplied entry and the entry to move in
            # the full series, then reorder
            for fullstart, rpn in enumerate(self.fullseries):
                # strip markers for patch guards
                if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                    break
            for i, rpn in enumerate(self.fullseries[fullstart:]):
                # strip markers for patch guards
                if self.guard_re.split(rpn, 1)[0] == patch:
                    break
            index = fullstart + i
            assert index < len(self.fullseries)
            fullpatch = self.fullseries[index]
            del self.fullseries[index]
            self.fullseries.insert(fullstart, fullpatch)
            self.parseseries()
            self.seriesdirty = True

        self.applieddirty = True
        if start > 0:
            self.checktoppatch(repo)
        if not patch:
            patch = self.series[start]
            end = start + 1
        else:
            end = self.series.index(patch, start) + 1

        tobackup = set()
        if (not nobackup and force) or keepchanges:
            status = self.checklocalchanges(repo, force=True)
            if keepchanges:
                tobackup.update(status.modified + status.added +
                                status.removed + status.deleted)
            else:
                tobackup.update(status.modified + status.added)

        s = self.series[start:end]
        all_files = set()
        try:
            if mergeq:
                ret = self.mergepatch(repo, mergeq, s, diffopts)
            else:
                ret = self.apply(repo, s, list, all_files=all_files,
                                 tobackup=tobackup, keepchanges=keepchanges)
        except: # re-raises
            self.ui.warn(_('cleaning up working directory...'))
            node = repo.dirstate.p1()
            hg.revert(repo, node, None)
            # only remove unknown files that we know we touched or
            # created while patching
            for f in all_files:
                if f not in repo.dirstate:
                    util.unlinkpath(repo.wjoin(f), ignoremissing=True)
            self.ui.warn(_('done\n'))
            raise

        if not self.applied:
            return ret[0]
        top = self.applied[-1].name
        if ret[0] and ret[0] > 1:
            msg = _("errors during apply, please fix and refresh %s\n")
            self.ui.write(msg % top)
        else:
            self.ui.write(_("now at: %s\n") % top)
        return ret[0]

    finally:
        wlock.release()
1436
1436
1437 def pop(self, repo, patch=None, force=False, update=True, all=False,
1437 def pop(self, repo, patch=None, force=False, update=True, all=False,
1438 nobackup=False, keepchanges=False):
1438 nobackup=False, keepchanges=False):
1439 self.checkkeepchanges(keepchanges, force)
1439 self.checkkeepchanges(keepchanges, force)
1440 wlock = repo.wlock()
1440 wlock = repo.wlock()
1441 try:
1441 try:
1442 if patch:
1442 if patch:
1443 # index, rev, patch
1443 # index, rev, patch
1444 info = self.isapplied(patch)
1444 info = self.isapplied(patch)
1445 if not info:
1445 if not info:
1446 patch = self.lookup(patch)
1446 patch = self.lookup(patch)
1447 info = self.isapplied(patch)
1447 info = self.isapplied(patch)
1448 if not info:
1448 if not info:
1449 raise util.Abort(_("patch %s is not applied") % patch)
1449 raise util.Abort(_("patch %s is not applied") % patch)
1450
1450
1451 if not self.applied:
1451 if not self.applied:
1452 # Allow qpop -a to work repeatedly,
1452 # Allow qpop -a to work repeatedly,
1453 # but not qpop without an argument
1453 # but not qpop without an argument
1454 self.ui.warn(_("no patches applied\n"))
1454 self.ui.warn(_("no patches applied\n"))
1455 return not all
1455 return not all
1456
1456
1457 if all:
1457 if all:
1458 start = 0
1458 start = 0
1459 elif patch:
1459 elif patch:
1460 start = info[0] + 1
1460 start = info[0] + 1
1461 else:
1461 else:
1462 start = len(self.applied) - 1
1462 start = len(self.applied) - 1
1463
1463
1464 if start >= len(self.applied):
1464 if start >= len(self.applied):
1465 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1465 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1466 return
1466 return
1467
1467
1468 if not update:
1468 if not update:
1469 parents = repo.dirstate.parents()
1469 parents = repo.dirstate.parents()
1470 rr = [x.node for x in self.applied]
1470 rr = [x.node for x in self.applied]
1471 for p in parents:
1471 for p in parents:
1472 if p in rr:
1472 if p in rr:
1473 self.ui.warn(_("qpop: forcing dirstate update\n"))
1473 self.ui.warn(_("qpop: forcing dirstate update\n"))
1474 update = True
1474 update = True
1475 else:
1475 else:
1476 parents = [p.node() for p in repo[None].parents()]
1476 parents = [p.node() for p in repo[None].parents()]
1477 needupdate = False
1477 needupdate = False
1478 for entry in self.applied[start:]:
1478 for entry in self.applied[start:]:
1479 if entry.node in parents:
1479 if entry.node in parents:
1480 needupdate = True
1480 needupdate = True
1481 break
1481 break
1482 update = needupdate
1482 update = needupdate
1483
1483
1484 tobackup = set()
1484 tobackup = set()
1485 if update:
1485 if update:
1486 s = self.checklocalchanges(repo, force=force or keepchanges)
1486 s = self.checklocalchanges(repo, force=force or keepchanges)
1487 if force:
1487 if force:
1488 if not nobackup:
1488 if not nobackup:
1489 tobackup.update(s.modified + s.added)
1489 tobackup.update(s.modified + s.added)
1490 elif keepchanges:
1490 elif keepchanges:
1491 tobackup.update(s.modified + s.added +
1491 tobackup.update(s.modified + s.added +
1492 s.removed + s.deleted)
1492 s.removed + s.deleted)
1493
1493
1494 self.applieddirty = True
1494 self.applieddirty = True
1495 end = len(self.applied)
1495 end = len(self.applied)
1496 rev = self.applied[start].node
1496 rev = self.applied[start].node
1497
1497
1498 try:
1498 try:
1499 heads = repo.changelog.heads(rev)
1499 heads = repo.changelog.heads(rev)
1500 except error.LookupError:
1500 except error.LookupError:
1501 node = short(rev)
1501 node = short(rev)
1502 raise util.Abort(_('trying to pop unknown node %s') % node)
1502 raise util.Abort(_('trying to pop unknown node %s') % node)
1503
1503
1504 if heads != [self.applied[-1].node]:
1504 if heads != [self.applied[-1].node]:
1505 raise util.Abort(_("popping would remove a revision not "
1505 raise util.Abort(_("popping would remove a revision not "
1506 "managed by this patch queue"))
1506 "managed by this patch queue"))
1507 if not repo[self.applied[-1].node].mutable():
1507 if not repo[self.applied[-1].node].mutable():
1508 raise util.Abort(
1508 raise util.Abort(
1509 _("popping would remove an immutable revision"),
1509 _("popping would remove an immutable revision"),
1510 hint=_('see "hg help phases" for details'))
1510 hint=_('see "hg help phases" for details'))
1511
1511
1512 # we know there are no local changes, so we can make a simplified
1512 # we know there are no local changes, so we can make a simplified
1513 # form of hg.update.
1513 # form of hg.update.
1514 if update:
1514 if update:
1515 qp = self.qparents(repo, rev)
1515 qp = self.qparents(repo, rev)
1516 ctx = repo[qp]
1516 ctx = repo[qp]
1517 m, a, r, d = repo.status(qp, '.')[:4]
1517 m, a, r, d = repo.status(qp, '.')[:4]
1518 if d:
1518 if d:
1519 raise util.Abort(_("deletions found between repo revs"))
1519 raise util.Abort(_("deletions found between repo revs"))
1520
1520
1521 tobackup = set(a + m + r) & tobackup
1521 tobackup = set(a + m + r) & tobackup
1522 if keepchanges and tobackup:
1522 if keepchanges and tobackup:
1523 raise util.Abort(_("local changes found, refresh first"))
1523 raise util.Abort(_("local changes found, refresh first"))
1524 self.backup(repo, tobackup)
1524 self.backup(repo, tobackup)
1525 repo.dirstate.beginparentchange()
1525 repo.dirstate.beginparentchange()
1526 for f in a:
1526 for f in a:
1527 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1527 util.unlinkpath(repo.wjoin(f), ignoremissing=True)
1528 repo.dirstate.drop(f)
1528 repo.dirstate.drop(f)
1529 for f in m + r:
1529 for f in m + r:
1530 fctx = ctx[f]
1530 fctx = ctx[f]
1531 repo.wwrite(f, fctx.data(), fctx.flags())
1531 repo.wwrite(f, fctx.data(), fctx.flags())
1532 repo.dirstate.normal(f)
1532 repo.dirstate.normal(f)
1533 repo.setparents(qp, nullid)
1533 repo.setparents(qp, nullid)
1534 repo.dirstate.endparentchange()
1534 repo.dirstate.endparentchange()
1535 for patch in reversed(self.applied[start:end]):
1535 for patch in reversed(self.applied[start:end]):
1536 self.ui.status(_("popping %s\n") % patch.name)
1536 self.ui.status(_("popping %s\n") % patch.name)
1537 del self.applied[start:end]
1537 del self.applied[start:end]
1538 strip(self.ui, repo, [rev], update=False, backup=False)
1538 strip(self.ui, repo, [rev], update=False, backup=False)
1539 for s, state in repo['.'].substate.items():
1539 for s, state in repo['.'].substate.items():
1540 repo['.'].sub(s).get(state)
1540 repo['.'].sub(s).get(state)
1541 if self.applied:
1541 if self.applied:
1542 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1542 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1543 else:
1543 else:
1544 self.ui.write(_("patch queue now empty\n"))
1544 self.ui.write(_("patch queue now empty\n"))
1545 finally:
1545 finally:
1546 wlock.release()
1546 wlock.release()
1547
1547
1548 def diff(self, repo, pats, opts):
1548 def diff(self, repo, pats, opts):
1549 top, patch = self.checktoppatch(repo)
1549 top, patch = self.checktoppatch(repo)
1550 if not top:
1550 if not top:
1551 self.ui.write(_("no patches applied\n"))
1551 self.ui.write(_("no patches applied\n"))
1552 return
1552 return
1553 qp = self.qparents(repo, top)
1553 qp = self.qparents(repo, top)
1554 if opts.get('reverse'):
1554 if opts.get('reverse'):
1555 node1, node2 = None, qp
1555 node1, node2 = None, qp
1556 else:
1556 else:
1557 node1, node2 = qp, None
1557 node1, node2 = qp, None
1558 diffopts = self.diffopts(opts, patch)
1558 diffopts = self.diffopts(opts, patch)
1559 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1559 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1560
1560
1561 def refresh(self, repo, pats=None, **opts):
1561 def refresh(self, repo, pats=None, **opts):
1562 if not self.applied:
1562 if not self.applied:
1563 self.ui.write(_("no patches applied\n"))
1563 self.ui.write(_("no patches applied\n"))
1564 return 1
1564 return 1
1565 msg = opts.get('msg', '').rstrip()
1565 msg = opts.get('msg', '').rstrip()
1566 edit = opts.get('edit')
1566 edit = opts.get('edit')
1567 editform = opts.get('editform', 'mq.qrefresh')
1567 editform = opts.get('editform', 'mq.qrefresh')
1568 newuser = opts.get('user')
1568 newuser = opts.get('user')
1569 newdate = opts.get('date')
1569 newdate = opts.get('date')
1570 if newdate:
1570 if newdate:
1571 newdate = '%d %d' % util.parsedate(newdate)
1571 newdate = '%d %d' % util.parsedate(newdate)
1572 wlock = repo.wlock()
1572 wlock = repo.wlock()
1573
1573
1574 try:
1574 try:
1575 self.checktoppatch(repo)
1575 self.checktoppatch(repo)
1576 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1576 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1577 if repo.changelog.heads(top) != [top]:
1577 if repo.changelog.heads(top) != [top]:
1578 raise util.Abort(_("cannot refresh a revision with children"))
1578 raise util.Abort(_("cannot refresh a revision with children"))
1579 if not repo[top].mutable():
1579 if not repo[top].mutable():
1580 raise util.Abort(_("cannot refresh immutable revision"),
1580 raise util.Abort(_("cannot refresh immutable revision"),
1581 hint=_('see "hg help phases" for details'))
1581 hint=_('see "hg help phases" for details'))
1582
1582
1583 cparents = repo.changelog.parents(top)
1583 cparents = repo.changelog.parents(top)
1584 patchparent = self.qparents(repo, top)
1584 patchparent = self.qparents(repo, top)
1585
1585
1586 inclsubs = checksubstate(repo, hex(patchparent))
1586 inclsubs = checksubstate(repo, hex(patchparent))
1587 if inclsubs:
1587 if inclsubs:
1588 substatestate = repo.dirstate['.hgsubstate']
1588 substatestate = repo.dirstate['.hgsubstate']
1589
1589
1590 ph = patchheader(self.join(patchfn), self.plainmode)
1590 ph = patchheader(self.join(patchfn), self.plainmode)
1591 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1591 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1592 if newuser:
1592 if newuser:
1593 ph.setuser(newuser)
1593 ph.setuser(newuser)
1594 if newdate:
1594 if newdate:
1595 ph.setdate(newdate)
1595 ph.setdate(newdate)
1596 ph.setparent(hex(patchparent))
1596 ph.setparent(hex(patchparent))
1597
1597
1598 # only commit new patch when write is complete
1598 # only commit new patch when write is complete
1599 patchf = self.opener(patchfn, 'w', atomictemp=True)
1599 patchf = self.opener(patchfn, 'w', atomictemp=True)
1600
1600
1601 # update the dirstate in place, strip off the qtip commit
1601 # update the dirstate in place, strip off the qtip commit
1602 # and then commit.
1602 # and then commit.
1603 #
1603 #
1604 # this should really read:
1604 # this should really read:
1605 # mm, dd, aa = repo.status(top, patchparent)[:3]
1605 # mm, dd, aa = repo.status(top, patchparent)[:3]
1606 # but we do it backwards to take advantage of manifest/changelog
1606 # but we do it backwards to take advantage of manifest/changelog
1607 # caching against the next repo.status call
1607 # caching against the next repo.status call
1608 mm, aa, dd = repo.status(patchparent, top)[:3]
1608 mm, aa, dd = repo.status(patchparent, top)[:3]
1609 changes = repo.changelog.read(top)
1609 changes = repo.changelog.read(top)
1610 man = repo.manifest.read(changes[0])
1610 man = repo.manifest.read(changes[0])
1611 aaa = aa[:]
1611 aaa = aa[:]
1612 matchfn = scmutil.match(repo[None], pats, opts)
1612 matchfn = scmutil.match(repo[None], pats, opts)
1613 # in short mode, we only diff the files included in the
1613 # in short mode, we only diff the files included in the
1614 # patch already plus specified files
1614 # patch already plus specified files
1615 if opts.get('short'):
1615 if opts.get('short'):
1616 # if amending a patch, we start with existing
1616 # if amending a patch, we start with existing
1617 # files plus specified files - unfiltered
1617 # files plus specified files - unfiltered
1618 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1618 match = scmutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1619 # filter with include/exclude options
1619 # filter with include/exclude options
1620 matchfn = scmutil.match(repo[None], opts=opts)
1620 matchfn = scmutil.match(repo[None], opts=opts)
1621 else:
1621 else:
1622 match = scmutil.matchall(repo)
1622 match = scmutil.matchall(repo)
1623 m, a, r, d = repo.status(match=match)[:4]
1623 m, a, r, d = repo.status(match=match)[:4]
1624 mm = set(mm)
1624 mm = set(mm)
1625 aa = set(aa)
1625 aa = set(aa)
1626 dd = set(dd)
1626 dd = set(dd)
1627
1627
1628 # we might end up with files that were added between
1628 # we might end up with files that were added between
1629 # qtip and the dirstate parent, but then changed in the
1629 # qtip and the dirstate parent, but then changed in the
1630 # local dirstate. in this case, we want them to only
1630 # local dirstate. in this case, we want them to only
1631 # show up in the added section
1631 # show up in the added section
1632 for x in m:
1632 for x in m:
1633 if x not in aa:
1633 if x not in aa:
1634 mm.add(x)
1634 mm.add(x)
1635 # we might end up with files added by the local dirstate that
1635 # we might end up with files added by the local dirstate that
1636 # were deleted by the patch. In this case, they should only
1636 # were deleted by the patch. In this case, they should only
1637 # show up in the changed section.
1637 # show up in the changed section.
1638 for x in a:
1638 for x in a:
1639 if x in dd:
1639 if x in dd:
1640 dd.remove(x)
1640 dd.remove(x)
1641 mm.add(x)
1641 mm.add(x)
1642 else:
1642 else:
1643 aa.add(x)
1643 aa.add(x)
1644 # make sure any files deleted in the local dirstate
1644 # make sure any files deleted in the local dirstate
1645 # are not in the add or change column of the patch
1645 # are not in the add or change column of the patch
1646 forget = []
1646 forget = []
1647 for x in d + r:
1647 for x in d + r:
1648 if x in aa:
1648 if x in aa:
1649 aa.remove(x)
1649 aa.remove(x)
1650 forget.append(x)
1650 forget.append(x)
1651 continue
1651 continue
1652 else:
1652 else:
1653 mm.discard(x)
1653 mm.discard(x)
1654 dd.add(x)
1654 dd.add(x)
1655
1655
1656 m = list(mm)
1656 m = list(mm)
1657 r = list(dd)
1657 r = list(dd)
1658 a = list(aa)
1658 a = list(aa)
1659
1659
1660 # create 'match' that includes the files to be recommitted.
1660 # create 'match' that includes the files to be recommitted.
1661 # apply matchfn via repo.status to ensure correct case handling.
1661 # apply matchfn via repo.status to ensure correct case handling.
1662 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1662 cm, ca, cr, cd = repo.status(patchparent, match=matchfn)[:4]
1663 allmatches = set(cm + ca + cr + cd)
1663 allmatches = set(cm + ca + cr + cd)
1664 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1664 refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]
1665
1665
1666 files = set(inclsubs)
1666 files = set(inclsubs)
1667 for x in refreshchanges:
1667 for x in refreshchanges:
1668 files.update(x)
1668 files.update(x)
1669 match = scmutil.matchfiles(repo, files)
1669 match = scmutil.matchfiles(repo, files)
1670
1670
1671 bmlist = repo[top].bookmarks()
1671 bmlist = repo[top].bookmarks()
1672
1672
1673 try:
1673 try:
1674 repo.dirstate.beginparentchange()
1674 repo.dirstate.beginparentchange()
1675 if diffopts.git or diffopts.upgrade:
1675 if diffopts.git or diffopts.upgrade:
1676 copies = {}
1676 copies = {}
1677 for dst in a:
1677 for dst in a:
1678 src = repo.dirstate.copied(dst)
1678 src = repo.dirstate.copied(dst)
1679 # during qfold, the source file for copies may
1679 # during qfold, the source file for copies may
1680 # be removed. Treat this as a simple add.
1680 # be removed. Treat this as a simple add.
1681 if src is not None and src in repo.dirstate:
1681 if src is not None and src in repo.dirstate:
1682 copies.setdefault(src, []).append(dst)
1682 copies.setdefault(src, []).append(dst)
1683 repo.dirstate.add(dst)
1683 repo.dirstate.add(dst)
1684 # remember the copies between patchparent and qtip
1684 # remember the copies between patchparent and qtip
1685 for dst in aaa:
1685 for dst in aaa:
1686 f = repo.file(dst)
1686 f = repo.file(dst)
1687 src = f.renamed(man[dst])
1687 src = f.renamed(man[dst])
1688 if src:
1688 if src:
1689 copies.setdefault(src[0], []).extend(
1689 copies.setdefault(src[0], []).extend(
1690 copies.get(dst, []))
1690 copies.get(dst, []))
1691 if dst in a:
1691 if dst in a:
1692 copies[src[0]].append(dst)
1692 copies[src[0]].append(dst)
1693 # we can't copy a file created by the patch itself
1693 # we can't copy a file created by the patch itself
1694 if dst in copies:
1694 if dst in copies:
1695 del copies[dst]
1695 del copies[dst]
1696 for src, dsts in copies.iteritems():
1696 for src, dsts in copies.iteritems():
1697 for dst in dsts:
1697 for dst in dsts:
1698 repo.dirstate.copy(src, dst)
1698 repo.dirstate.copy(src, dst)
1699 else:
1699 else:
1700 for dst in a:
1700 for dst in a:
1701 repo.dirstate.add(dst)
1701 repo.dirstate.add(dst)
1702 # Drop useless copy information
1702 # Drop useless copy information
1703 for f in list(repo.dirstate.copies()):
1703 for f in list(repo.dirstate.copies()):
1704 repo.dirstate.copy(None, f)
1704 repo.dirstate.copy(None, f)
1705 for f in r:
1705 for f in r:
1706 repo.dirstate.remove(f)
1706 repo.dirstate.remove(f)
1707 # if the patch excludes a modified file, mark that
1707 # if the patch excludes a modified file, mark that
1708 # file with mtime=0 so status can see it.
1708 # file with mtime=0 so status can see it.
1709 mm = []
1709 mm = []
1710 for i in xrange(len(m) - 1, -1, -1):
1710 for i in xrange(len(m) - 1, -1, -1):
1711 if not matchfn(m[i]):
1711 if not matchfn(m[i]):
1712 mm.append(m[i])
1712 mm.append(m[i])
1713 del m[i]
1713 del m[i]
1714 for f in m:
1714 for f in m:
1715 repo.dirstate.normal(f)
1715 repo.dirstate.normal(f)
1716 for f in mm:
1716 for f in mm:
1717 repo.dirstate.normallookup(f)
1717 repo.dirstate.normallookup(f)
1718 for f in forget:
1718 for f in forget:
1719 repo.dirstate.drop(f)
1719 repo.dirstate.drop(f)
1720
1720
1721 user = ph.user or changes[1]
1721 user = ph.user or changes[1]
1722
1722
1723 oldphase = repo[top].phase()
1723 oldphase = repo[top].phase()
1724
1724
1725 # assumes strip can roll itself back if interrupted
1725 # assumes strip can roll itself back if interrupted
1726 repo.setparents(*cparents)
1726 repo.setparents(*cparents)
1727 repo.dirstate.endparentchange()
1727 repo.dirstate.endparentchange()
1728 self.applied.pop()
1728 self.applied.pop()
1729 self.applieddirty = True
1729 self.applieddirty = True
1730 strip(self.ui, repo, [top], update=False, backup=False)
1730 strip(self.ui, repo, [top], update=False, backup=False)
1731 except: # re-raises
1731 except: # re-raises
1732 repo.dirstate.invalidate()
1732 repo.dirstate.invalidate()
1733 raise
1733 raise
1734
1734
1735 try:
1735 try:
1736 # might be nice to attempt to roll back strip after this
1736 # might be nice to attempt to roll back strip after this
1737
1737
1738 defaultmsg = "[mq]: %s" % patchfn
1738 defaultmsg = "[mq]: %s" % patchfn
1739 editor = cmdutil.getcommiteditor(editform=editform)
1739 editor = cmdutil.getcommiteditor(editform=editform)
1740 if edit:
1740 if edit:
1741 def finishdesc(desc):
1741 def finishdesc(desc):
1742 if desc.rstrip():
1742 if desc.rstrip():
1743 ph.setmessage(desc)
1743 ph.setmessage(desc)
1744 return desc
1744 return desc
1745 return defaultmsg
1745 return defaultmsg
1746 # i18n: this message is shown in editor with "HG: " prefix
1746 # i18n: this message is shown in editor with "HG: " prefix
1747 extramsg = _('Leave message empty to use default message.')
1747 extramsg = _('Leave message empty to use default message.')
1748 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1748 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1749 extramsg=extramsg,
1749 extramsg=extramsg,
1750 editform=editform)
1750 editform=editform)
1751 message = msg or "\n".join(ph.message)
1751 message = msg or "\n".join(ph.message)
1752 elif not msg:
1752 elif not msg:
1753 if not ph.message:
1753 if not ph.message:
1754 message = defaultmsg
1754 message = defaultmsg
1755 else:
1755 else:
1756 message = "\n".join(ph.message)
1756 message = "\n".join(ph.message)
1757 else:
1757 else:
1758 message = msg
1758 message = msg
1759 ph.setmessage(msg)
1759 ph.setmessage(msg)
1760
1760
1761 # Ensure we create a new changeset in the same phase than
1761 # Ensure we create a new changeset in the same phase than
1762 # the old one.
1762 # the old one.
1763 n = newcommit(repo, oldphase, message, user, ph.date,
1763 n = newcommit(repo, oldphase, message, user, ph.date,
1764 match=match, force=True, editor=editor)
1764 match=match, force=True, editor=editor)
1765 # only write patch after a successful commit
1765 # only write patch after a successful commit
1766 c = [list(x) for x in refreshchanges]
1766 c = [list(x) for x in refreshchanges]
1767 if inclsubs:
1767 if inclsubs:
1768 self.putsubstate2changes(substatestate, c)
1768 self.putsubstate2changes(substatestate, c)
1769 chunks = patchmod.diff(repo, patchparent,
1769 chunks = patchmod.diff(repo, patchparent,
1770 changes=c, opts=diffopts)
1770 changes=c, opts=diffopts)
1771 comments = str(ph)
1771 comments = str(ph)
1772 if comments:
1772 if comments:
1773 patchf.write(comments)
1773 patchf.write(comments)
1774 for chunk in chunks:
1774 for chunk in chunks:
1775 patchf.write(chunk)
1775 patchf.write(chunk)
1776 patchf.close()
1776 patchf.close()
1777
1777
1778 marks = repo._bookmarks
1778 marks = repo._bookmarks
1779 for bm in bmlist:
1779 for bm in bmlist:
1780 marks[bm] = n
1780 marks[bm] = n
1781 marks.write()
1781 marks.write()
1782
1782
1783 self.applied.append(statusentry(n, patchfn))
1783 self.applied.append(statusentry(n, patchfn))
1784 except: # re-raises
1784 except: # re-raises
1785 ctx = repo[cparents[0]]
1785 ctx = repo[cparents[0]]
1786 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1786 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1787 self.savedirty()
1787 self.savedirty()
1788 self.ui.warn(_('refresh interrupted while patch was popped! '
1788 self.ui.warn(_('refresh interrupted while patch was popped! '
1789 '(revert --all, qpush to recover)\n'))
1789 '(revert --all, qpush to recover)\n'))
1790 raise
1790 raise
1791 finally:
1791 finally:
1792 wlock.release()
1792 wlock.release()
1793 self.removeundo(repo)
1793 self.removeundo(repo)
1794
1794
1795 def init(self, repo, create=False):
1795 def init(self, repo, create=False):
1796 if not create and os.path.isdir(self.path):
1796 if not create and os.path.isdir(self.path):
1797 raise util.Abort(_("patch queue directory already exists"))
1797 raise util.Abort(_("patch queue directory already exists"))
1798 try:
1798 try:
1799 os.mkdir(self.path)
1799 os.mkdir(self.path)
1800 except OSError, inst:
1800 except OSError, inst:
1801 if inst.errno != errno.EEXIST or not create:
1801 if inst.errno != errno.EEXIST or not create:
1802 raise
1802 raise
1803 if create:
1803 if create:
1804 return self.qrepo(create=True)
1804 return self.qrepo(create=True)
1805
1805
1806 def unapplied(self, repo, patch=None):
1806 def unapplied(self, repo, patch=None):
1807 if patch and patch not in self.series:
1807 if patch and patch not in self.series:
1808 raise util.Abort(_("patch %s is not in series file") % patch)
1808 raise util.Abort(_("patch %s is not in series file") % patch)
1809 if not patch:
1809 if not patch:
1810 start = self.seriesend()
1810 start = self.seriesend()
1811 else:
1811 else:
1812 start = self.series.index(patch) + 1
1812 start = self.series.index(patch) + 1
1813 unapplied = []
1813 unapplied = []
1814 for i in xrange(start, len(self.series)):
1814 for i in xrange(start, len(self.series)):
1815 pushable, reason = self.pushable(i)
1815 pushable, reason = self.pushable(i)
1816 if pushable:
1816 if pushable:
1817 unapplied.append((i, self.series[i]))
1817 unapplied.append((i, self.series[i]))
1818 self.explainpushable(i)
1818 self.explainpushable(i)
1819 return unapplied
1819 return unapplied
1820
1820
1821 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1821 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1822 summary=False):
1822 summary=False):
1823 def displayname(pfx, patchname, state):
1823 def displayname(pfx, patchname, state):
1824 if pfx:
1824 if pfx:
1825 self.ui.write(pfx)
1825 self.ui.write(pfx)
1826 if summary:
1826 if summary:
1827 ph = patchheader(self.join(patchname), self.plainmode)
1827 ph = patchheader(self.join(patchname), self.plainmode)
1828 msg = ph.message and ph.message[0] or ''
1828 msg = ph.message and ph.message[0] or ''
1829 if self.ui.formatted():
1829 if self.ui.formatted():
1830 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1830 width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
1831 if width > 0:
1831 if width > 0:
1832 msg = util.ellipsis(msg, width)
1832 msg = util.ellipsis(msg, width)
1833 else:
1833 else:
1834 msg = ''
1834 msg = ''
1835 self.ui.write(patchname, label='qseries.' + state)
1835 self.ui.write(patchname, label='qseries.' + state)
1836 self.ui.write(': ')
1836 self.ui.write(': ')
1837 self.ui.write(msg, label='qseries.message.' + state)
1837 self.ui.write(msg, label='qseries.message.' + state)
1838 else:
1838 else:
1839 self.ui.write(patchname, label='qseries.' + state)
1839 self.ui.write(patchname, label='qseries.' + state)
1840 self.ui.write('\n')
1840 self.ui.write('\n')
1841
1841
1842 applied = set([p.name for p in self.applied])
1842 applied = set([p.name for p in self.applied])
1843 if length is None:
1843 if length is None:
1844 length = len(self.series) - start
1844 length = len(self.series) - start
1845 if not missing:
1845 if not missing:
1846 if self.ui.verbose:
1846 if self.ui.verbose:
1847 idxwidth = len(str(start + length - 1))
1847 idxwidth = len(str(start + length - 1))
1848 for i in xrange(start, start + length):
1848 for i in xrange(start, start + length):
1849 patch = self.series[i]
1849 patch = self.series[i]
1850 if patch in applied:
1850 if patch in applied:
1851 char, state = 'A', 'applied'
1851 char, state = 'A', 'applied'
1852 elif self.pushable(i)[0]:
1852 elif self.pushable(i)[0]:
1853 char, state = 'U', 'unapplied'
1853 char, state = 'U', 'unapplied'
1854 else:
1854 else:
1855 char, state = 'G', 'guarded'
1855 char, state = 'G', 'guarded'
1856 pfx = ''
1856 pfx = ''
1857 if self.ui.verbose:
1857 if self.ui.verbose:
1858 pfx = '%*d %s ' % (idxwidth, i, char)
1858 pfx = '%*d %s ' % (idxwidth, i, char)
1859 elif status and status != char:
1859 elif status and status != char:
1860 continue
1860 continue
1861 displayname(pfx, patch, state)
1861 displayname(pfx, patch, state)
1862 else:
1862 else:
1863 msng_list = []
1863 msng_list = []
1864 for root, dirs, files in os.walk(self.path):
1864 for root, dirs, files in os.walk(self.path):
1865 d = root[len(self.path) + 1:]
1865 d = root[len(self.path) + 1:]
1866 for f in files:
1866 for f in files:
1867 fl = os.path.join(d, f)
1867 fl = os.path.join(d, f)
1868 if (fl not in self.series and
1868 if (fl not in self.series and
1869 fl not in (self.statuspath, self.seriespath,
1869 fl not in (self.statuspath, self.seriespath,
1870 self.guardspath)
1870 self.guardspath)
1871 and not fl.startswith('.')):
1871 and not fl.startswith('.')):
1872 msng_list.append(fl)
1872 msng_list.append(fl)
1873 for x in sorted(msng_list):
1873 for x in sorted(msng_list):
1874 pfx = self.ui.verbose and ('D ') or ''
1874 pfx = self.ui.verbose and ('D ') or ''
1875 displayname(pfx, x, 'missing')
1875 displayname(pfx, x, 'missing')
1876
1876
1877 def issaveline(self, l):
1877 def issaveline(self, l):
1878 if l.name == '.hg.patches.save.line':
1878 if l.name == '.hg.patches.save.line':
1879 return True
1879 return True
1880
1880
1881 def qrepo(self, create=False):
1881 def qrepo(self, create=False):
1882 ui = self.baseui.copy()
1882 ui = self.baseui.copy()
1883 if create or os.path.isdir(self.join(".hg")):
1883 if create or os.path.isdir(self.join(".hg")):
1884 return hg.repository(ui, path=self.path, create=create)
1884 return hg.repository(ui, path=self.path, create=create)
1885
1885
1886 def restore(self, repo, rev, delete=None, qupdate=None):
1886 def restore(self, repo, rev, delete=None, qupdate=None):
1887 desc = repo[rev].description().strip()
1887 desc = repo[rev].description().strip()
1888 lines = desc.splitlines()
1888 lines = desc.splitlines()
1889 i = 0
1889 i = 0
1890 datastart = None
1890 datastart = None
1891 series = []
1891 series = []
1892 applied = []
1892 applied = []
1893 qpp = None
1893 qpp = None
1894 for i, line in enumerate(lines):
1894 for i, line in enumerate(lines):
1895 if line == 'Patch Data:':
1895 if line == 'Patch Data:':
1896 datastart = i + 1
1896 datastart = i + 1
1897 elif line.startswith('Dirstate:'):
1897 elif line.startswith('Dirstate:'):
1898 l = line.rstrip()
1898 l = line.rstrip()
1899 l = l[10:].split(' ')
1899 l = l[10:].split(' ')
1900 qpp = [bin(x) for x in l]
1900 qpp = [bin(x) for x in l]
1901 elif datastart is not None:
1901 elif datastart is not None:
1902 l = line.rstrip()
1902 l = line.rstrip()
1903 n, name = l.split(':', 1)
1903 n, name = l.split(':', 1)
1904 if n:
1904 if n:
1905 applied.append(statusentry(bin(n), name))
1905 applied.append(statusentry(bin(n), name))
1906 else:
1906 else:
1907 series.append(l)
1907 series.append(l)
1908 if datastart is None:
1908 if datastart is None:
1909 self.ui.warn(_("no saved patch data found\n"))
1909 self.ui.warn(_("no saved patch data found\n"))
1910 return 1
1910 return 1
1911 self.ui.warn(_("restoring status: %s\n") % lines[0])
1911 self.ui.warn(_("restoring status: %s\n") % lines[0])
1912 self.fullseries = series
1912 self.fullseries = series
1913 self.applied = applied
1913 self.applied = applied
1914 self.parseseries()
1914 self.parseseries()
1915 self.seriesdirty = True
1915 self.seriesdirty = True
1916 self.applieddirty = True
1916 self.applieddirty = True
1917 heads = repo.changelog.heads()
1917 heads = repo.changelog.heads()
1918 if delete:
1918 if delete:
1919 if rev not in heads:
1919 if rev not in heads:
1920 self.ui.warn(_("save entry has children, leaving it alone\n"))
1920 self.ui.warn(_("save entry has children, leaving it alone\n"))
1921 else:
1921 else:
1922 self.ui.warn(_("removing save entry %s\n") % short(rev))
1922 self.ui.warn(_("removing save entry %s\n") % short(rev))
1923 pp = repo.dirstate.parents()
1923 pp = repo.dirstate.parents()
1924 if rev in pp:
1924 if rev in pp:
1925 update = True
1925 update = True
1926 else:
1926 else:
1927 update = False
1927 update = False
1928 strip(self.ui, repo, [rev], update=update, backup=False)
1928 strip(self.ui, repo, [rev], update=update, backup=False)
1929 if qpp:
1929 if qpp:
1930 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1930 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1931 (short(qpp[0]), short(qpp[1])))
1931 (short(qpp[0]), short(qpp[1])))
1932 if qupdate:
1932 if qupdate:
1933 self.ui.status(_("updating queue directory\n"))
1933 self.ui.status(_("updating queue directory\n"))
1934 r = self.qrepo()
1934 r = self.qrepo()
1935 if not r:
1935 if not r:
1936 self.ui.warn(_("unable to load queue repository\n"))
1936 self.ui.warn(_("unable to load queue repository\n"))
1937 return 1
1937 return 1
1938 hg.clean(r, qpp[0])
1938 hg.clean(r, qpp[0])
1939
1939
def save(self, repo, msg=None):
    """Snapshot the current queue state as a 'save line' changeset.

    Commits a changeset in *repo* whose description encodes the queue
    repo's dirstate parents (when a queue repo exists), the applied
    patch list and the full series, so the state can be recovered
    later.  Returns 1 on failure (nothing applied, state already
    saved, or the commit failed); returns None on success.
    """
    if not self.applied:
        self.ui.warn(_("save: no patches applied, exiting\n"))
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn(_("status is already saved\n"))
        return 1

    if not msg:
        msg = _("hg patches saved state")
    else:
        msg = "hg patches: " + msg.rstrip('\r\n')
    r = self.qrepo()
    if r:
        # Also record the queue repository's dirstate parents.
        pp = r.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
    msg += "\n\nPatch Data:\n"
    msg += ''.join('%s\n' % x for x in self.applied)
    msg += ''.join(':%s\n' % x for x in self.fullseries)
    n = repo.commit(msg, force=True)
    if not n:
        self.ui.warn(_("repo commit failed\n"))
        return 1
    # The marker name lets issaveline() recognize this save commit.
    self.applied.append(statusentry(n, '.hg.patches.save.line'))
    self.applieddirty = True
    self.removeundo(repo)
1966
1966
def fullseriesend(self):
    """Return the index in the full series just past the last applied patch.

    With nothing applied the answer is 0.  If the topmost applied patch
    cannot be located in the series file, fall back to the length of the
    full series.
    """
    if not self.applied:
        return 0
    topname = self.applied[-1].name
    pos = self.findseries(topname)
    if pos is None:
        return len(self.fullseries)
    return pos + 1
1975
1975
def seriesend(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    end = 0
    def nextpatch(start):
        # Scan forward from 'start' for the first pushable (unguarded)
        # patch; with all_patches the caller's index is used as-is.
        if all_patches or start >= len(self.series):
            return start
        for i in xrange(start, len(self.series)):
            p, reason = self.pushable(i)
            if p:
                return i
            # Tell the user why each guarded patch is being skipped.
            self.explainpushable(i)
        return len(self.series)
    if self.applied:
        p = self.applied[-1].name
        try:
            end = self.series.index(p)
        except ValueError:
            # Topmost applied patch no longer appears in the series file.
            return 0
        return nextpatch(end + 1)
    return nextpatch(end)
1999
1999
def appliedname(self, index):
    """Return the display name of the index-th applied patch.

    In verbose mode the patch's position in the series is prefixed.
    """
    name = self.applied[index].name
    if self.ui.verbose:
        return "%d %s" % (self.series.index(name), name)
    return name
2007
2007
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patches into the queue.

    *files* are patch files to import ('-' reads stdin); *rev*, when
    given, places existing repository revisions under mq control
    instead (the two are mutually exclusive).  *existing* registers a
    file already present in the patch directory, *force* overwrites
    name clashes, *git* selects git-style diffs for --rev imports.
    Returns the list of imported patch names.
    """
    def checkseries(patchname):
        # Refuse duplicate entries in the series file.
        if patchname in self.series:
            raise util.Abort(_('patch %s is already in the series file')
                             % patchname)

    if rev:
        if files:
            raise util.Abort(_('option "-r" not valid when importing '
                               'files'))
        rev = scmutil.revrange(repo, rev)
        # Process revisions child-first; each one becomes the new qbase.
        rev.sort(reverse=True)
    elif not files:
        raise util.Abort(_('no files or revisions specified'))
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise util.Abort(_('option "-n" not valid when importing multiple '
                           'patches'))
    imported = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev.first()))
        if len(heads) > 1:
            raise util.Abort(_('revision %d is the root of more than one '
                               'branch') % rev.last())
        if self.applied:
            base = repo.changelog.node(rev.first())
            if base in [n.node for n in self.applied]:
                raise util.Abort(_('revision %d is already managed')
                                 % rev.first())
            if heads != [self.applied[-1].node]:
                raise util.Abort(_('revision %d is not the parent of '
                                   'the queue') % rev.first())
            base = repo.changelog.rev(self.applied[0].node)
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev.first())]:
                raise util.Abort(_('revision %d has unmanaged children')
                                 % rev.first())
            lastparent = None

        diffopts = self.diffopts({'git': git})
        # All status/phase mutations happen inside one transaction.
        tr = repo.transaction('qimport')
        try:
            for r in rev:
                if not repo[r].mutable():
                    raise util.Abort(_('revision %d is not mutable') % r,
                                     hint=_('see "hg help phases" '
                                            'for details'))
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    raise util.Abort(_('cannot import merge revision %d')
                                     % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of '
                                       '%d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                checkseries(patchname)
                self.checkpatchname(patchname, force)
                # New patches go on top of the stack (front of the series).
                self.fullseries.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                patchf.close()

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                imported.append(patchname)
                patchname = None
            if rev and repo.ui.configbool('mq', 'secret', False):
                # if we added anything with --rev, move the secret root
                phases.retractboundary(repo, tr, phases.secret, [n])
            self.parseseries()
            self.applieddirty = True
            self.seriesdirty = True
            tr.close()
        finally:
            tr.release()

    for i, filename in enumerate(files):
        if existing:
            # -e: register a patch already in the patch directory,
            # optionally renaming it to *patchname*.
            if filename == '-':
                raise util.Abort(_('-e is incompatible with import from -'))
            filename = normname(filename)
            self.checkreservedname(filename)
            if util.url(filename).islocal():
                originpath = self.join(filename)
                if not os.path.isfile(originpath):
                    raise util.Abort(
                        _("patch %s does not exist") % filename)

            if patchname:
                self.checkpatchname(patchname, force)

                self.ui.write(_('renaming %s to %s\n')
                              % (filename, patchname))
                util.rename(originpath, self.join(patchname))
            else:
                patchname = filename

        else:
            # Copy the patch content into the patch directory.
            if filename == '-' and not patchname:
                raise util.Abort(_('need --name to import a patch from -'))
            elif not patchname:
                patchname = normname(os.path.basename(filename.rstrip('/')))
            self.checkpatchname(patchname, force)
            try:
                if filename == '-':
                    text = self.ui.fin.read()
                else:
                    fp = hg.openpath(self.ui, filename)
                    text = fp.read()
                    fp.close()
            except (OSError, IOError):
                raise util.Abort(_("unable to read file %s") % filename)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
            patchf.close()
        if not force:
            checkseries(patchname)
        if patchname not in self.series:
            # Insert right after the last applied patch.
            index = self.fullseriesend() + i
            self.fullseries[index:index] = [patchname]
        self.parseseries()
        self.seriesdirty = True
        self.ui.warn(_("adding %s to series file\n") % patchname)
        self.added.append(patchname)
        imported.append(patchname)
        patchname = None

    self.removeundo(repo)
    return imported
2149
2149
def fixkeepchangesopts(ui, opts):
    """Turn on --keep-changes implicitly when mq.keepchanges is set.

    Returns *opts* unchanged when the config knob is off or when
    --force/--exact was given; otherwise returns a copy of *opts* with
    'keep_changes' enabled (the caller's dict is never mutated).
    """
    keep = ui.configbool('mq', 'keepchanges')
    if not keep or opts.get('force') or opts.get('exact'):
        return opts
    patched = dict(opts)
    patched['keep_changes'] = True
    return patched
2157
2157
2158 @command("qdelete|qremove|qrm",
2158 @command("qdelete|qremove|qrm",
2159 [('k', 'keep', None, _('keep patch file')),
2159 [('k', 'keep', None, _('keep patch file')),
2160 ('r', 'rev', [],
2160 ('r', 'rev', [],
2161 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2161 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2162 _('hg qdelete [-k] [PATCH]...'))
2162 _('hg qdelete [-k] [PATCH]...'))
2163 def delete(ui, repo, *patches, **opts):
2163 def delete(ui, repo, *patches, **opts):
2164 """remove patches from queue
2164 """remove patches from queue
2165
2165
2166 The patches must not be applied, and at least one patch is required. Exact
2166 The patches must not be applied, and at least one patch is required. Exact
2167 patch identifiers must be given. With -k/--keep, the patch files are
2167 patch identifiers must be given. With -k/--keep, the patch files are
2168 preserved in the patch directory.
2168 preserved in the patch directory.
2169
2169
2170 To stop managing a patch and move it into permanent history,
2170 To stop managing a patch and move it into permanent history,
2171 use the :hg:`qfinish` command."""
2171 use the :hg:`qfinish` command."""
2172 q = repo.mq
2172 q = repo.mq
2173 q.delete(repo, patches, opts)
2173 q.delete(repo, patches, opts)
2174 q.savedirty()
2174 q.savedirty()
2175 return 0
2175 return 0
2176
2176
2177 @command("qapplied",
2177 @command("qapplied",
2178 [('1', 'last', None, _('show only the preceding applied patch'))
2178 [('1', 'last', None, _('show only the preceding applied patch'))
2179 ] + seriesopts,
2179 ] + seriesopts,
2180 _('hg qapplied [-1] [-s] [PATCH]'))
2180 _('hg qapplied [-1] [-s] [PATCH]'))
2181 def applied(ui, repo, patch=None, **opts):
2181 def applied(ui, repo, patch=None, **opts):
2182 """print the patches already applied
2182 """print the patches already applied
2183
2183
2184 Returns 0 on success."""
2184 Returns 0 on success."""
2185
2185
2186 q = repo.mq
2186 q = repo.mq
2187
2187
2188 if patch:
2188 if patch:
2189 if patch not in q.series:
2189 if patch not in q.series:
2190 raise util.Abort(_("patch %s is not in series file") % patch)
2190 raise util.Abort(_("patch %s is not in series file") % patch)
2191 end = q.series.index(patch) + 1
2191 end = q.series.index(patch) + 1
2192 else:
2192 else:
2193 end = q.seriesend(True)
2193 end = q.seriesend(True)
2194
2194
2195 if opts.get('last') and not end:
2195 if opts.get('last') and not end:
2196 ui.write(_("no patches applied\n"))
2196 ui.write(_("no patches applied\n"))
2197 return 1
2197 return 1
2198 elif opts.get('last') and end == 1:
2198 elif opts.get('last') and end == 1:
2199 ui.write(_("only one patch applied\n"))
2199 ui.write(_("only one patch applied\n"))
2200 return 1
2200 return 1
2201 elif opts.get('last'):
2201 elif opts.get('last'):
2202 start = end - 2
2202 start = end - 2
2203 end = 1
2203 end = 1
2204 else:
2204 else:
2205 start = 0
2205 start = 0
2206
2206
2207 q.qseries(repo, length=end, start=start, status='A',
2207 q.qseries(repo, length=end, start=start, status='A',
2208 summary=opts.get('summary'))
2208 summary=opts.get('summary'))
2209
2209
2210
2210
2211 @command("qunapplied",
2211 @command("qunapplied",
2212 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2212 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2213 _('hg qunapplied [-1] [-s] [PATCH]'))
2213 _('hg qunapplied [-1] [-s] [PATCH]'))
2214 def unapplied(ui, repo, patch=None, **opts):
2214 def unapplied(ui, repo, patch=None, **opts):
2215 """print the patches not yet applied
2215 """print the patches not yet applied
2216
2216
2217 Returns 0 on success."""
2217 Returns 0 on success."""
2218
2218
2219 q = repo.mq
2219 q = repo.mq
2220 if patch:
2220 if patch:
2221 if patch not in q.series:
2221 if patch not in q.series:
2222 raise util.Abort(_("patch %s is not in series file") % patch)
2222 raise util.Abort(_("patch %s is not in series file") % patch)
2223 start = q.series.index(patch) + 1
2223 start = q.series.index(patch) + 1
2224 else:
2224 else:
2225 start = q.seriesend(True)
2225 start = q.seriesend(True)
2226
2226
2227 if start == len(q.series) and opts.get('first'):
2227 if start == len(q.series) and opts.get('first'):
2228 ui.write(_("all patches applied\n"))
2228 ui.write(_("all patches applied\n"))
2229 return 1
2229 return 1
2230
2230
2231 length = opts.get('first') and 1 or None
2231 length = opts.get('first') and 1 or None
2232 q.qseries(repo, start=start, length=length, status='U',
2232 q.qseries(repo, start=start, length=length, status='U',
2233 summary=opts.get('summary'))
2233 summary=opts.get('summary'))
2234
2234
2235 @command("qimport",
2235 @command("qimport",
2236 [('e', 'existing', None, _('import file in patch directory')),
2236 [('e', 'existing', None, _('import file in patch directory')),
2237 ('n', 'name', '',
2237 ('n', 'name', '',
2238 _('name of patch file'), _('NAME')),
2238 _('name of patch file'), _('NAME')),
2239 ('f', 'force', None, _('overwrite existing files')),
2239 ('f', 'force', None, _('overwrite existing files')),
2240 ('r', 'rev', [],
2240 ('r', 'rev', [],
2241 _('place existing revisions under mq control'), _('REV')),
2241 _('place existing revisions under mq control'), _('REV')),
2242 ('g', 'git', None, _('use git extended diff format')),
2242 ('g', 'git', None, _('use git extended diff format')),
2243 ('P', 'push', None, _('qpush after importing'))],
2243 ('P', 'push', None, _('qpush after importing'))],
2244 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2244 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'))
2245 def qimport(ui, repo, *filename, **opts):
2245 def qimport(ui, repo, *filename, **opts):
2246 """import a patch or existing changeset
2246 """import a patch or existing changeset
2247
2247
2248 The patch is inserted into the series after the last applied
2248 The patch is inserted into the series after the last applied
2249 patch. If no patches have been applied, qimport prepends the patch
2249 patch. If no patches have been applied, qimport prepends the patch
2250 to the series.
2250 to the series.
2251
2251
2252 The patch will have the same name as its source file unless you
2252 The patch will have the same name as its source file unless you
2253 give it a new one with -n/--name.
2253 give it a new one with -n/--name.
2254
2254
2255 You can register an existing patch inside the patch directory with
2255 You can register an existing patch inside the patch directory with
2256 the -e/--existing flag.
2256 the -e/--existing flag.
2257
2257
2258 With -f/--force, an existing patch of the same name will be
2258 With -f/--force, an existing patch of the same name will be
2259 overwritten.
2259 overwritten.
2260
2260
2261 An existing changeset may be placed under mq control with -r/--rev
2261 An existing changeset may be placed under mq control with -r/--rev
2262 (e.g. qimport --rev . -n patch will place the current revision
2262 (e.g. qimport --rev . -n patch will place the current revision
2263 under mq control). With -g/--git, patches imported with --rev will
2263 under mq control). With -g/--git, patches imported with --rev will
2264 use the git diff format. See the diffs help topic for information
2264 use the git diff format. See the diffs help topic for information
2265 on why this is important for preserving rename/copy information
2265 on why this is important for preserving rename/copy information
2266 and permission changes. Use :hg:`qfinish` to remove changesets
2266 and permission changes. Use :hg:`qfinish` to remove changesets
2267 from mq control.
2267 from mq control.
2268
2268
2269 To import a patch from standard input, pass - as the patch file.
2269 To import a patch from standard input, pass - as the patch file.
2270 When importing from standard input, a patch name must be specified
2270 When importing from standard input, a patch name must be specified
2271 using the --name flag.
2271 using the --name flag.
2272
2272
2273 To import an existing patch while renaming it::
2273 To import an existing patch while renaming it::
2274
2274
2275 hg qimport -e existing-patch -n new-name
2275 hg qimport -e existing-patch -n new-name
2276
2276
2277 Returns 0 if import succeeded.
2277 Returns 0 if import succeeded.
2278 """
2278 """
2279 lock = repo.lock() # cause this may move phase
2279 lock = repo.lock() # cause this may move phase
2280 try:
2280 try:
2281 q = repo.mq
2281 q = repo.mq
2282 try:
2282 try:
2283 imported = q.qimport(
2283 imported = q.qimport(
2284 repo, filename, patchname=opts.get('name'),
2284 repo, filename, patchname=opts.get('name'),
2285 existing=opts.get('existing'), force=opts.get('force'),
2285 existing=opts.get('existing'), force=opts.get('force'),
2286 rev=opts.get('rev'), git=opts.get('git'))
2286 rev=opts.get('rev'), git=opts.get('git'))
2287 finally:
2287 finally:
2288 q.savedirty()
2288 q.savedirty()
2289 finally:
2289 finally:
2290 lock.release()
2290 lock.release()
2291
2291
2292 if imported and opts.get('push') and not opts.get('rev'):
2292 if imported and opts.get('push') and not opts.get('rev'):
2293 return q.push(repo, imported[-1])
2293 return q.push(repo, imported[-1])
2294 return 0
2294 return 0
2295
2295
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    mq = repo.mq
    qrepo = mq.init(repo, create)
    mq.savedirty()
    if not qrepo:
        return 0
    # Seed the versioned queue repo: ignore transient state files,
    # ensure a series file exists, then track both files.
    if not os.path.exists(qrepo.wjoin('.hgignore')):
        ignorefp = qrepo.wopener('.hgignore', 'w')
        ignorefp.write('^\\.hg\n'
                       '^\\.mq\n'
                       'syntax: glob\n'
                       'status\n'
                       'guards\n')
        ignorefp.close()
    if not os.path.exists(qrepo.wjoin('series')):
        qrepo.wopener('series', 'w').close()
    qrepo[None].add(['.hgignore', 'series'])
    commands.add(ui, qrepo)
    return 0
2321
2321
2322 @command("^qinit",
2322 @command("^qinit",
2323 [('c', 'create-repo', None, _('create queue repository'))],
2323 [('c', 'create-repo', None, _('create queue repository'))],
2324 _('hg qinit [-c]'))
2324 _('hg qinit [-c]'))
2325 def init(ui, repo, **opts):
2325 def init(ui, repo, **opts):
2326 """init a new queue repository (DEPRECATED)
2326 """init a new queue repository (DEPRECATED)
2327
2327
2328 The queue repository is unversioned by default. If
2328 The queue repository is unversioned by default. If
2329 -c/--create-repo is specified, qinit will create a separate nested
2329 -c/--create-repo is specified, qinit will create a separate nested
2330 repository for patches (qinit -c may also be run later to convert
2330 repository for patches (qinit -c may also be run later to convert
2331 an unversioned patch repository into a versioned one). You can use
2331 an unversioned patch repository into a versioned one). You can use
2332 qcommit to commit changes to this queue repository.
2332 qcommit to commit changes to this queue repository.
2333
2333
2334 This command is deprecated. Without -c, it's implied by other relevant
2334 This command is deprecated. Without -c, it's implied by other relevant
2335 commands. With -c, use :hg:`init --mq` instead."""
2335 commands. With -c, use :hg:`init --mq` instead."""
2336 return qinit(ui, repo, create=opts.get('create_repo'))
2336 return qinit(ui, repo, create=opts.get('create_repo'))
2337
2337
2338 @command("qclone",
2338 @command("qclone",
2339 [('', 'pull', None, _('use pull protocol to copy metadata')),
2339 [('', 'pull', None, _('use pull protocol to copy metadata')),
2340 ('U', 'noupdate', None,
2340 ('U', 'noupdate', None,
2341 _('do not update the new working directories')),
2341 _('do not update the new working directories')),
2342 ('', 'uncompressed', None,
2342 ('', 'uncompressed', None,
2343 _('use uncompressed transfer (fast over LAN)')),
2343 _('use uncompressed transfer (fast over LAN)')),
2344 ('p', 'patches', '',
2344 ('p', 'patches', '',
2345 _('location of source patch repository'), _('REPO')),
2345 _('location of source patch repository'), _('REPO')),
2346 ] + commands.remoteopts,
2346 ] + commands.remoteopts,
2347 _('hg qclone [OPTION]... SOURCE [DEST]'),
2347 _('hg qclone [OPTION]... SOURCE [DEST]'),
2348 norepo=True)
2348 norepo=True)
2349 def clone(ui, source, dest=None, **opts):
2349 def clone(ui, source, dest=None, **opts):
2350 '''clone main and patch repository at same time
2350 '''clone main and patch repository at same time
2351
2351
2352 If source is local, destination will have no patches applied. If
2352 If source is local, destination will have no patches applied. If
2353 source is remote, this command can not check if patches are
2353 source is remote, this command can not check if patches are
2354 applied in source, so cannot guarantee that patches are not
2354 applied in source, so cannot guarantee that patches are not
2355 applied in destination. If you clone remote repository, be sure
2355 applied in destination. If you clone remote repository, be sure
2356 before that it has no patches applied.
2356 before that it has no patches applied.
2357
2357
2358 Source patch repository is looked for in <src>/.hg/patches by
2358 Source patch repository is looked for in <src>/.hg/patches by
2359 default. Use -p <url> to change.
2359 default. Use -p <url> to change.
2360
2360
2361 The patch directory must be a nested Mercurial repository, as
2361 The patch directory must be a nested Mercurial repository, as
2362 would be created by :hg:`init --mq`.
2362 would be created by :hg:`init --mq`.
2363
2363
2364 Return 0 on success.
2364 Return 0 on success.
2365 '''
2365 '''
2366 def patchdir(repo):
2366 def patchdir(repo):
2367 """compute a patch repo url from a repo object"""
2367 """compute a patch repo url from a repo object"""
2368 url = repo.url()
2368 url = repo.url()
2369 if url.endswith('/'):
2369 if url.endswith('/'):
2370 url = url[:-1]
2370 url = url[:-1]
2371 return url + '/.hg/patches'
2371 return url + '/.hg/patches'
2372
2372
2373 # main repo (destination and sources)
2373 # main repo (destination and sources)
2374 if dest is None:
2374 if dest is None:
2375 dest = hg.defaultdest(source)
2375 dest = hg.defaultdest(source)
2376 sr = hg.peer(ui, opts, ui.expandpath(source))
2376 sr = hg.peer(ui, opts, ui.expandpath(source))
2377
2377
2378 # patches repo (source only)
2378 # patches repo (source only)
2379 if opts.get('patches'):
2379 if opts.get('patches'):
2380 patchespath = ui.expandpath(opts.get('patches'))
2380 patchespath = ui.expandpath(opts.get('patches'))
2381 else:
2381 else:
2382 patchespath = patchdir(sr)
2382 patchespath = patchdir(sr)
2383 try:
2383 try:
2384 hg.peer(ui, opts, patchespath)
2384 hg.peer(ui, opts, patchespath)
2385 except error.RepoError:
2385 except error.RepoError:
2386 raise util.Abort(_('versioned patch repository not found'
2386 raise util.Abort(_('versioned patch repository not found'
2387 ' (see init --mq)'))
2387 ' (see init --mq)'))
2388 qbase, destrev = None, None
2388 qbase, destrev = None, None
2389 if sr.local():
2389 if sr.local():
2390 repo = sr.local()
2390 repo = sr.local()
2391 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2391 if repo.mq.applied and repo[qbase].phase() != phases.secret:
2392 qbase = repo.mq.applied[0].node
2392 qbase = repo.mq.applied[0].node
2393 if not hg.islocal(dest):
2393 if not hg.islocal(dest):
2394 heads = set(repo.heads())
2394 heads = set(repo.heads())
2395 destrev = list(heads.difference(repo.heads(qbase)))
2395 destrev = list(heads.difference(repo.heads(qbase)))
2396 destrev.append(repo.changelog.parents(qbase)[0])
2396 destrev.append(repo.changelog.parents(qbase)[0])
2397 elif sr.capable('lookup'):
2397 elif sr.capable('lookup'):
2398 try:
2398 try:
2399 qbase = sr.lookup('qbase')
2399 qbase = sr.lookup('qbase')
2400 except error.RepoError:
2400 except error.RepoError:
2401 pass
2401 pass
2402
2402
2403 ui.note(_('cloning main repository\n'))
2403 ui.note(_('cloning main repository\n'))
2404 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2404 sr, dr = hg.clone(ui, opts, sr.url(), dest,
2405 pull=opts.get('pull'),
2405 pull=opts.get('pull'),
2406 rev=destrev,
2406 rev=destrev,
2407 update=False,
2407 update=False,
2408 stream=opts.get('uncompressed'))
2408 stream=opts.get('uncompressed'))
2409
2409
2410 ui.note(_('cloning patch repository\n'))
2410 ui.note(_('cloning patch repository\n'))
2411 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2411 hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
2412 pull=opts.get('pull'), update=not opts.get('noupdate'),
2412 pull=opts.get('pull'), update=not opts.get('noupdate'),
2413 stream=opts.get('uncompressed'))
2413 stream=opts.get('uncompressed'))
2414
2414
2415 if dr.local():
2415 if dr.local():
2416 repo = dr.local()
2416 repo = dr.local()
2417 if qbase:
2417 if qbase:
2418 ui.note(_('stripping applied patches from destination '
2418 ui.note(_('stripping applied patches from destination '
2419 'repository\n'))
2419 'repository\n'))
2420 strip(ui, repo, [qbase], update=False, backup=None)
2420 strip(ui, repo, [qbase], update=False, backup=None)
2421 if not opts.get('noupdate'):
2421 if not opts.get('noupdate'):
2422 ui.note(_('updating destination repository\n'))
2422 ui.note(_('updating destination repository\n'))
2423 hg.update(repo, repo.changelog.tip())
2423 hg.update(repo, repo.changelog.tip())
2424
2424
2425 @command("qcommit|qci",
2425 @command("qcommit|qci",
2426 commands.table["^commit|ci"][1],
2426 commands.table["^commit|ci"][1],
2427 _('hg qcommit [OPTION]... [FILE]...'),
2427 _('hg qcommit [OPTION]... [FILE]...'),
2428 inferrepo=True)
2428 inferrepo=True)
2429 def commit(ui, repo, *pats, **opts):
2429 def commit(ui, repo, *pats, **opts):
2430 """commit changes in the queue repository (DEPRECATED)
2430 """commit changes in the queue repository (DEPRECATED)
2431
2431
2432 This command is deprecated; use :hg:`commit --mq` instead."""
2432 This command is deprecated; use :hg:`commit --mq` instead."""
2433 q = repo.mq
2433 q = repo.mq
2434 r = q.qrepo()
2434 r = q.qrepo()
2435 if not r:
2435 if not r:
2436 raise util.Abort('no queue repository')
2436 raise util.Abort('no queue repository')
2437 commands.commit(r.ui, r, *pats, **opts)
2437 commands.commit(r.ui, r, *pats, **opts)
2438
2438
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
          ] + seriesopts,
         _('hg qseries [-ms]'))
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # Delegate to the queue object; -m/--missing and -s/--summary are
    # forwarded as keyword arguments.
    mq = repo.mq
    mq.qseries(repo, missing=opts.get('missing'),
               summary=opts.get('summary'))
    return 0
2450
2450
@command("qtop", seriesopts, _('hg qtop [-s]'))
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # Index just past the last applied patch; 0 when nothing is applied.
    applied_end = q.seriesend(True) if q.applied else 0
    if not applied_end:
        ui.write(_("no patches applied\n"))
        return 1
    q.qseries(repo, start=applied_end - 1, length=1, status='A',
              summary=opts.get('summary'))
2464
2464
@command("qnext", seriesopts, _('hg qnext [-s]'))
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    pos = q.seriesend()
    if pos == len(q.series):
        # The whole series is applied; there is nothing left to push.
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
2476
2476
@command("qprev", seriesopts, _('hg qprev [-s]'))
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    napplied = len(q.applied)
    # A single applied patch has no predecessor; check this first to keep
    # the original diagnostic ordering.
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    idx = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=idx, length=1, status='A',
              summary=opts.get('summary'))
2493
2493
def setupheaderopts(ui, opts):
    """Fill in 'user' and 'date' in *opts* from the -U/--currentuser and
    -D/--currentdate flags when no explicit value was supplied."""
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % util.makedate()
2499
2499
@command("^qnew",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
         inferrepo=True)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    # Resolve -m/-l/-e into the commit message, then fill in user/date
    # from -U/-D before handing everything to the queue.
    opts['msg'] = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    q = repo.mq
    q.new(repo, patch, *args, **opts)
    q.savedirty()
    return 0
2545
2545
@command("^qrefresh",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
          ] + commands.walkopts + commands.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
         inferrepo=True)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    # Hold the working-directory lock across refresh + series save so the
    # patch and the dirstate cannot go out of sync.
    wlock = repo.wlock()
    try:
        status = q.refresh(repo, pats, msg=message, **opts)
        q.savedirty()
        return status
    finally:
        wlock.release()
2593
2593
@command("^qdiff",
         commands.diffopts + commands.diffopts2 + commands.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'),
         inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    # The queue object renders the combined diff directly to the ui.
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
2615
2615
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
          ] + commands.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'))
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    q = repo.mq
    # Preconditions: at least one patch name, a topmost applied patch to
    # fold into, and no conflicting local changes.
    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    # Explicit -m/-l message, if any; otherwise one is assembled from the
    # folded patches' headers below.
    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    # Resolve and validate each requested patch name.  Duplicates (or the
    # current top itself) only produce a warning -- note the patch is still
    # appended to the list afterwards.
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s')
                             % p)
        patches.append(p)

    # Apply each patch onto the working directory, collecting headers when
    # no explicit message was given.
    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        # NOTE: this rebinds 'files', shadowing the *files parameter,
        # which is no longer needed at this point.
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)

    # No explicit message: join the parent patch's header with each folded
    # patch's header, separated by '* * *' lines.
    if not message:
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    # Refresh the top patch with the cumulative changes, then delete the
    # folded patch files, all under the working-directory lock.
    wlock = repo.wlock()
    try:
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
    finally:
        wlock.release()
2685
2685
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'))
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, opts)
    q = repo.mq
    patch = q.lookup(patch)
    # Both directions share the same safety options.
    common = dict(force=opts.get('force'),
                  nobackup=opts.get('no_backup'),
                  keepchanges=opts.get('keep_changes'))
    # Pop down to the target if it is already applied, otherwise push up.
    if q.isapplied(patch):
        ret = q.pop(repo, patch, **common)
    else:
        ret = q.push(repo, patch, **common)
    q.savedirty()
    return ret
2709
2709
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'))
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::

       Specifying negative guards now requires '--'.

    To set guards on another patch::

        hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # Print one series entry with its guards, colored by patch state.
        # Closes over 'q' and 'applied', which are bound below before any
        # call to this helper.
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            # Space-separate guards, with no trailing space.
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        # -l/--list is exclusive with patch names, guards and -n/--none.
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or '
                               'arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # No patch name given (first argument is a +/- guard, or nothing):
    # default to the topmost applied patch.
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    # Otherwise the first argument is the patch name; the rest are guards.
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        # Set mode: replace the patch's guards (an empty list drops them).
        idx = q.findseries(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        # Query mode: just print the named patch's guards.
        status(q.series.index(q.lookup(patch)))
2784
2784
@command("qheader", [], _('hg qheader [PATCH]'))
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    if not patch:
        # No explicit patch: fall back to the topmost applied one.
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    else:
        patch = q.lookup(patch)
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')
2802
2802
def lastsavename(path):
    """Return (filename, index) of the highest-numbered save file for *path*.

    Save files are siblings of *path* named ``<base>.<N>`` for a decimal N.
    Returns ``(None, None)`` when no such file exists.
    """
    (directory, base) = os.path.split(path)
    # Escape the base name and anchor the suffix: the previous pattern
    # ("%s.([0-9]+)" with a bare '.') also matched names like 'baseX12' or
    # 'base.12junk', and broke on base names containing regex metacharacters.
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    maxindex = None
    maxname = None
    for f in os.listdir(directory):
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            # Keep the numerically largest suffix seen so far.
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2819
2819
def savename(path):
    """Return the name for the next save file after *path* (base.N+1)."""
    last, index = lastsavename(path)
    if last is None:
        # No existing save file: start numbering at 1.
        index = 0
    return "%s.%d" % (path, index + 1)
2826
2826
@command("^qpush",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'))
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    opts = fixkeepchangesopts(ui, opts)
    if opts.get('merge'):
        # Deprecated -m/--merge: push while merging against a saved queue,
        # named via -n/--name or found as the newest save of this queue.
        if opts.get('name'):
            newpath = repo.join(opts.get('name'))
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.baseui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    # All remaining option handling happens inside queue.push.
    ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'),
                 exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                 keepchanges=opts.get('keep_changes'))
    return ret
2871
2871
@command("^qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'))
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = fixkeepchangesopts(ui, opts)
    if opts.get('name'):
        # Deprecated -n/--name: pop from a named (saved) queue without
        # touching the working directory.
        q = queue(ui, repo.baseui, repo.path, repo.join(opts.get('name')))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2908
2908
2909 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2909 @command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'))
2910 def rename(ui, repo, patch, name=None, **opts):
2910 def rename(ui, repo, patch, name=None, **opts):
2911 """rename a patch
2911 """rename a patch
2912
2912
2913 With one argument, renames the current patch to PATCH1.
2913 With one argument, renames the current patch to PATCH1.
2914 With two arguments, renames PATCH1 to PATCH2.
2914 With two arguments, renames PATCH1 to PATCH2.
2915
2915
2916 Returns 0 on success."""
2916 Returns 0 on success."""
2917 q = repo.mq
2917 q = repo.mq
2918 if not name:
2918 if not name:
2919 name = patch
2919 name = patch
2920 patch = None
2920 patch = None
2921
2921
2922 if patch:
2922 if patch:
2923 patch = q.lookup(patch)
2923 patch = q.lookup(patch)
2924 else:
2924 else:
2925 if not q.applied:
2925 if not q.applied:
2926 ui.write(_('no patches applied\n'))
2926 ui.write(_('no patches applied\n'))
2927 return
2927 return
2928 patch = q.lookup('qtip')
2928 patch = q.lookup('qtip')
2929 absdest = q.join(name)
2929 absdest = q.join(name)
2930 if os.path.isdir(absdest):
2930 if os.path.isdir(absdest):
2931 name = normname(os.path.join(name, os.path.basename(patch)))
2931 name = normname(os.path.join(name, os.path.basename(patch)))
2932 absdest = q.join(name)
2932 absdest = q.join(name)
2933 q.checkpatchname(name)
2933 q.checkpatchname(name)
2934
2934
2935 ui.note(_('renaming %s to %s\n') % (patch, name))
2935 ui.note(_('renaming %s to %s\n') % (patch, name))
2936 i = q.findseries(patch)
2936 i = q.findseries(patch)
2937 guards = q.guard_re.findall(q.fullseries[i])
2937 guards = q.guard_re.findall(q.fullseries[i])
2938 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
2938 q.fullseries[i] = name + ''.join([' #' + g for g in guards])
2939 q.parseseries()
2939 q.parseseries()
2940 q.seriesdirty = True
2940 q.seriesdirty = True
2941
2941
2942 info = q.isapplied(patch)
2942 info = q.isapplied(patch)
2943 if info:
2943 if info:
2944 q.applied[info[0]] = statusentry(info[1], name)
2944 q.applied[info[0]] = statusentry(info[1], name)
2945 q.applieddirty = True
2945 q.applieddirty = True
2946
2946
2947 destdir = os.path.dirname(absdest)
2947 destdir = os.path.dirname(absdest)
2948 if not os.path.isdir(destdir):
2948 if not os.path.isdir(destdir):
2949 os.makedirs(destdir)
2949 os.makedirs(destdir)
2950 util.rename(q.join(patch), absdest)
2950 util.rename(q.join(patch), absdest)
2951 r = q.qrepo()
2951 r = q.qrepo()
2952 if r and patch in r.dirstate:
2952 if r and patch in r.dirstate:
2953 wctx = r[None]
2953 wctx = r[None]
2954 wlock = r.wlock()
2954 wlock = r.wlock()
2955 try:
2955 try:
2956 if r.dirstate[patch] == 'a':
2956 if r.dirstate[patch] == 'a':
2957 r.dirstate.drop(patch)
2957 r.dirstate.drop(patch)
2958 r.dirstate.add(name)
2958 r.dirstate.add(name)
2959 else:
2959 else:
2960 wctx.copy(patch, name)
2960 wctx.copy(patch, name)
2961 wctx.forget([patch])
2961 wctx.forget([patch])
2962 finally:
2962 finally:
2963 wlock.release()
2963 wlock.release()
2964
2964
2965 q.savedirty()
2965 q.savedirty()
2966
2966
2967 @command("qrestore",
2967 @command("qrestore",
2968 [('d', 'delete', None, _('delete save entry')),
2968 [('d', 'delete', None, _('delete save entry')),
2969 ('u', 'update', None, _('update queue working directory'))],
2969 ('u', 'update', None, _('update queue working directory'))],
2970 _('hg qrestore [-d] [-u] REV'))
2970 _('hg qrestore [-d] [-u] REV'))
2971 def restore(ui, repo, rev, **opts):
2971 def restore(ui, repo, rev, **opts):
2972 """restore the queue state saved by a revision (DEPRECATED)
2972 """restore the queue state saved by a revision (DEPRECATED)
2973
2973
2974 This command is deprecated, use :hg:`rebase` instead."""
2974 This command is deprecated, use :hg:`rebase` instead."""
2975 rev = repo.lookup(rev)
2975 rev = repo.lookup(rev)
2976 q = repo.mq
2976 q = repo.mq
2977 q.restore(repo, rev, delete=opts.get('delete'),
2977 q.restore(repo, rev, delete=opts.get('delete'),
2978 qupdate=opts.get('update'))
2978 qupdate=opts.get('update'))
2979 q.savedirty()
2979 q.savedirty()
2980 return 0
2980 return 0
2981
2981
2982 @command("qsave",
2982 @command("qsave",
2983 [('c', 'copy', None, _('copy patch directory')),
2983 [('c', 'copy', None, _('copy patch directory')),
2984 ('n', 'name', '',
2984 ('n', 'name', '',
2985 _('copy directory name'), _('NAME')),
2985 _('copy directory name'), _('NAME')),
2986 ('e', 'empty', None, _('clear queue status file')),
2986 ('e', 'empty', None, _('clear queue status file')),
2987 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2987 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2988 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
2988 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'))
2989 def save(ui, repo, **opts):
2989 def save(ui, repo, **opts):
2990 """save current queue state (DEPRECATED)
2990 """save current queue state (DEPRECATED)
2991
2991
2992 This command is deprecated, use :hg:`rebase` instead."""
2992 This command is deprecated, use :hg:`rebase` instead."""
2993 q = repo.mq
2993 q = repo.mq
2994 message = cmdutil.logmessage(ui, opts)
2994 message = cmdutil.logmessage(ui, opts)
2995 ret = q.save(repo, msg=message)
2995 ret = q.save(repo, msg=message)
2996 if ret:
2996 if ret:
2997 return ret
2997 return ret
2998 q.savedirty() # save to .hg/patches before copying
2998 q.savedirty() # save to .hg/patches before copying
2999 if opts.get('copy'):
2999 if opts.get('copy'):
3000 path = q.path
3000 path = q.path
3001 if opts.get('name'):
3001 if opts.get('name'):
3002 newpath = os.path.join(q.basepath, opts.get('name'))
3002 newpath = os.path.join(q.basepath, opts.get('name'))
3003 if os.path.exists(newpath):
3003 if os.path.exists(newpath):
3004 if not os.path.isdir(newpath):
3004 if not os.path.isdir(newpath):
3005 raise util.Abort(_('destination %s exists and is not '
3005 raise util.Abort(_('destination %s exists and is not '
3006 'a directory') % newpath)
3006 'a directory') % newpath)
3007 if not opts.get('force'):
3007 if not opts.get('force'):
3008 raise util.Abort(_('destination %s exists, '
3008 raise util.Abort(_('destination %s exists, '
3009 'use -f to force') % newpath)
3009 'use -f to force') % newpath)
3010 else:
3010 else:
3011 newpath = savename(path)
3011 newpath = savename(path)
3012 ui.warn(_("copy %s to %s\n") % (path, newpath))
3012 ui.warn(_("copy %s to %s\n") % (path, newpath))
3013 util.copyfiles(path, newpath)
3013 util.copyfiles(path, newpath)
3014 if opts.get('empty'):
3014 if opts.get('empty'):
3015 del q.applied[:]
3015 del q.applied[:]
3016 q.applieddirty = True
3016 q.applieddirty = True
3017 q.savedirty()
3017 q.savedirty()
3018 return 0
3018 return 0
3019
3019
3020
3020
3021 @command("qselect",
3021 @command("qselect",
3022 [('n', 'none', None, _('disable all guards')),
3022 [('n', 'none', None, _('disable all guards')),
3023 ('s', 'series', None, _('list all guards in series file')),
3023 ('s', 'series', None, _('list all guards in series file')),
3024 ('', 'pop', None, _('pop to before first guarded applied patch')),
3024 ('', 'pop', None, _('pop to before first guarded applied patch')),
3025 ('', 'reapply', None, _('pop, then reapply patches'))],
3025 ('', 'reapply', None, _('pop, then reapply patches'))],
3026 _('hg qselect [OPTION]... [GUARD]...'))
3026 _('hg qselect [OPTION]... [GUARD]...'))
3027 def select(ui, repo, *args, **opts):
3027 def select(ui, repo, *args, **opts):
3028 '''set or print guarded patches to push
3028 '''set or print guarded patches to push
3029
3029
3030 Use the :hg:`qguard` command to set or print guards on patch, then use
3030 Use the :hg:`qguard` command to set or print guards on patch, then use
3031 qselect to tell mq which guards to use. A patch will be pushed if
3031 qselect to tell mq which guards to use. A patch will be pushed if
3032 it has no guards or any positive guards match the currently
3032 it has no guards or any positive guards match the currently
3033 selected guard, but will not be pushed if any negative guards
3033 selected guard, but will not be pushed if any negative guards
3034 match the current guard. For example::
3034 match the current guard. For example::
3035
3035
3036 qguard foo.patch -- -stable (negative guard)
3036 qguard foo.patch -- -stable (negative guard)
3037 qguard bar.patch +stable (positive guard)
3037 qguard bar.patch +stable (positive guard)
3038 qselect stable
3038 qselect stable
3039
3039
3040 This activates the "stable" guard. mq will skip foo.patch (because
3040 This activates the "stable" guard. mq will skip foo.patch (because
3041 it has a negative match) but push bar.patch (because it has a
3041 it has a negative match) but push bar.patch (because it has a
3042 positive match).
3042 positive match).
3043
3043
3044 With no arguments, prints the currently active guards.
3044 With no arguments, prints the currently active guards.
3045 With one argument, sets the active guard.
3045 With one argument, sets the active guard.
3046
3046
3047 Use -n/--none to deactivate guards (no other arguments needed).
3047 Use -n/--none to deactivate guards (no other arguments needed).
3048 When no guards are active, patches with positive guards are
3048 When no guards are active, patches with positive guards are
3049 skipped and patches with negative guards are pushed.
3049 skipped and patches with negative guards are pushed.
3050
3050
3051 qselect can change the guards on applied patches. It does not pop
3051 qselect can change the guards on applied patches. It does not pop
3052 guarded patches by default. Use --pop to pop back to the last
3052 guarded patches by default. Use --pop to pop back to the last
3053 applied patch that is not guarded. Use --reapply (which implies
3053 applied patch that is not guarded. Use --reapply (which implies
3054 --pop) to push back to the current patch afterwards, but skip
3054 --pop) to push back to the current patch afterwards, but skip
3055 guarded patches.
3055 guarded patches.
3056
3056
3057 Use -s/--series to print a list of all guards in the series file
3057 Use -s/--series to print a list of all guards in the series file
3058 (no other arguments needed). Use -v for more information.
3058 (no other arguments needed). Use -v for more information.
3059
3059
3060 Returns 0 on success.'''
3060 Returns 0 on success.'''
3061
3061
3062 q = repo.mq
3062 q = repo.mq
3063 guards = q.active()
3063 guards = q.active()
3064 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3064 pushable = lambda i: q.pushable(q.applied[i].name)[0]
3065 if args or opts.get('none'):
3065 if args or opts.get('none'):
3066 old_unapplied = q.unapplied(repo)
3066 old_unapplied = q.unapplied(repo)
3067 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3067 old_guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3068 q.setactive(args)
3068 q.setactive(args)
3069 q.savedirty()
3069 q.savedirty()
3070 if not args:
3070 if not args:
3071 ui.status(_('guards deactivated\n'))
3071 ui.status(_('guards deactivated\n'))
3072 if not opts.get('pop') and not opts.get('reapply'):
3072 if not opts.get('pop') and not opts.get('reapply'):
3073 unapplied = q.unapplied(repo)
3073 unapplied = q.unapplied(repo)
3074 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3074 guarded = [i for i in xrange(len(q.applied)) if not pushable(i)]
3075 if len(unapplied) != len(old_unapplied):
3075 if len(unapplied) != len(old_unapplied):
3076 ui.status(_('number of unguarded, unapplied patches has '
3076 ui.status(_('number of unguarded, unapplied patches has '
3077 'changed from %d to %d\n') %
3077 'changed from %d to %d\n') %
3078 (len(old_unapplied), len(unapplied)))
3078 (len(old_unapplied), len(unapplied)))
3079 if len(guarded) != len(old_guarded):
3079 if len(guarded) != len(old_guarded):
3080 ui.status(_('number of guarded, applied patches has changed '
3080 ui.status(_('number of guarded, applied patches has changed '
3081 'from %d to %d\n') %
3081 'from %d to %d\n') %
3082 (len(old_guarded), len(guarded)))
3082 (len(old_guarded), len(guarded)))
3083 elif opts.get('series'):
3083 elif opts.get('series'):
3084 guards = {}
3084 guards = {}
3085 noguards = 0
3085 noguards = 0
3086 for gs in q.seriesguards:
3086 for gs in q.seriesguards:
3087 if not gs:
3087 if not gs:
3088 noguards += 1
3088 noguards += 1
3089 for g in gs:
3089 for g in gs:
3090 guards.setdefault(g, 0)
3090 guards.setdefault(g, 0)
3091 guards[g] += 1
3091 guards[g] += 1
3092 if ui.verbose:
3092 if ui.verbose:
3093 guards['NONE'] = noguards
3093 guards['NONE'] = noguards
3094 guards = guards.items()
3094 guards = guards.items()
3095 guards.sort(key=lambda x: x[0][1:])
3095 guards.sort(key=lambda x: x[0][1:])
3096 if guards:
3096 if guards:
3097 ui.note(_('guards in series file:\n'))
3097 ui.note(_('guards in series file:\n'))
3098 for guard, count in guards:
3098 for guard, count in guards:
3099 ui.note('%2d ' % count)
3099 ui.note('%2d ' % count)
3100 ui.write(guard, '\n')
3100 ui.write(guard, '\n')
3101 else:
3101 else:
3102 ui.note(_('no guards in series file\n'))
3102 ui.note(_('no guards in series file\n'))
3103 else:
3103 else:
3104 if guards:
3104 if guards:
3105 ui.note(_('active guards:\n'))
3105 ui.note(_('active guards:\n'))
3106 for g in guards:
3106 for g in guards:
3107 ui.write(g, '\n')
3107 ui.write(g, '\n')
3108 else:
3108 else:
3109 ui.write(_('no active guards\n'))
3109 ui.write(_('no active guards\n'))
3110 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3110 reapply = opts.get('reapply') and q.applied and q.applied[-1].name
3111 popped = False
3111 popped = False
3112 if opts.get('pop') or opts.get('reapply'):
3112 if opts.get('pop') or opts.get('reapply'):
3113 for i in xrange(len(q.applied)):
3113 for i in xrange(len(q.applied)):
3114 if not pushable(i):
3114 if not pushable(i):
3115 ui.status(_('popping guarded patches\n'))
3115 ui.status(_('popping guarded patches\n'))
3116 popped = True
3116 popped = True
3117 if i == 0:
3117 if i == 0:
3118 q.pop(repo, all=True)
3118 q.pop(repo, all=True)
3119 else:
3119 else:
3120 q.pop(repo, q.applied[i - 1].name)
3120 q.pop(repo, q.applied[i - 1].name)
3121 break
3121 break
3122 if popped:
3122 if popped:
3123 try:
3123 try:
3124 if reapply:
3124 if reapply:
3125 ui.status(_('reapplying unguarded patches\n'))
3125 ui.status(_('reapplying unguarded patches\n'))
3126 q.push(repo, reapply)
3126 q.push(repo, reapply)
3127 finally:
3127 finally:
3128 q.savedirty()
3128 q.savedirty()
3129
3129
3130 @command("qfinish",
3130 @command("qfinish",
3131 [('a', 'applied', None, _('finish all applied changesets'))],
3131 [('a', 'applied', None, _('finish all applied changesets'))],
3132 _('hg qfinish [-a] [REV]...'))
3132 _('hg qfinish [-a] [REV]...'))
3133 def finish(ui, repo, *revrange, **opts):
3133 def finish(ui, repo, *revrange, **opts):
3134 """move applied patches into repository history
3134 """move applied patches into repository history
3135
3135
3136 Finishes the specified revisions (corresponding to applied
3136 Finishes the specified revisions (corresponding to applied
3137 patches) by moving them out of mq control into regular repository
3137 patches) by moving them out of mq control into regular repository
3138 history.
3138 history.
3139
3139
3140 Accepts a revision range or the -a/--applied option. If --applied
3140 Accepts a revision range or the -a/--applied option. If --applied
3141 is specified, all applied mq revisions are removed from mq
3141 is specified, all applied mq revisions are removed from mq
3142 control. Otherwise, the given revisions must be at the base of the
3142 control. Otherwise, the given revisions must be at the base of the
3143 stack of applied patches.
3143 stack of applied patches.
3144
3144
3145 This can be especially useful if your changes have been applied to
3145 This can be especially useful if your changes have been applied to
3146 an upstream repository, or if you are about to push your changes
3146 an upstream repository, or if you are about to push your changes
3147 to upstream.
3147 to upstream.
3148
3148
3149 Returns 0 on success.
3149 Returns 0 on success.
3150 """
3150 """
3151 if not opts.get('applied') and not revrange:
3151 if not opts.get('applied') and not revrange:
3152 raise util.Abort(_('no revisions specified'))
3152 raise util.Abort(_('no revisions specified'))
3153 elif opts.get('applied'):
3153 elif opts.get('applied'):
3154 revrange = ('qbase::qtip',) + revrange
3154 revrange = ('qbase::qtip',) + revrange
3155
3155
3156 q = repo.mq
3156 q = repo.mq
3157 if not q.applied:
3157 if not q.applied:
3158 ui.status(_('no patches applied\n'))
3158 ui.status(_('no patches applied\n'))
3159 return 0
3159 return 0
3160
3160
3161 revs = scmutil.revrange(repo, revrange)
3161 revs = scmutil.revrange(repo, revrange)
3162 if repo['.'].rev() in revs and repo[None].files():
3162 if repo['.'].rev() in revs and repo[None].files():
3163 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3163 ui.warn(_('warning: uncommitted changes in the working directory\n'))
3164 # queue.finish may changes phases but leave the responsibility to lock the
3164 # queue.finish may changes phases but leave the responsibility to lock the
3165 # repo to the caller to avoid deadlock with wlock. This command code is
3165 # repo to the caller to avoid deadlock with wlock. This command code is
3166 # responsibility for this locking.
3166 # responsibility for this locking.
3167 lock = repo.lock()
3167 lock = repo.lock()
3168 try:
3168 try:
3169 q.finish(repo, revs)
3169 q.finish(repo, revs)
3170 q.savedirty()
3170 q.savedirty()
3171 finally:
3171 finally:
3172 lock.release()
3172 lock.release()
3173 return 0
3173 return 0
3174
3174
3175 @command("qqueue",
3175 @command("qqueue",
3176 [('l', 'list', False, _('list all available queues')),
3176 [('l', 'list', False, _('list all available queues')),
3177 ('', 'active', False, _('print name of active queue')),
3177 ('', 'active', False, _('print name of active queue')),
3178 ('c', 'create', False, _('create new queue')),
3178 ('c', 'create', False, _('create new queue')),
3179 ('', 'rename', False, _('rename active queue')),
3179 ('', 'rename', False, _('rename active queue')),
3180 ('', 'delete', False, _('delete reference to queue')),
3180 ('', 'delete', False, _('delete reference to queue')),
3181 ('', 'purge', False, _('delete queue, and remove patch dir')),
3181 ('', 'purge', False, _('delete queue, and remove patch dir')),
3182 ],
3182 ],
3183 _('[OPTION] [QUEUE]'))
3183 _('[OPTION] [QUEUE]'))
3184 def qqueue(ui, repo, name=None, **opts):
3184 def qqueue(ui, repo, name=None, **opts):
3185 '''manage multiple patch queues
3185 '''manage multiple patch queues
3186
3186
3187 Supports switching between different patch queues, as well as creating
3187 Supports switching between different patch queues, as well as creating
3188 new patch queues and deleting existing ones.
3188 new patch queues and deleting existing ones.
3189
3189
3190 Omitting a queue name or specifying -l/--list will show you the registered
3190 Omitting a queue name or specifying -l/--list will show you the registered
3191 queues - by default the "normal" patches queue is registered. The currently
3191 queues - by default the "normal" patches queue is registered. The currently
3192 active queue will be marked with "(active)". Specifying --active will print
3192 active queue will be marked with "(active)". Specifying --active will print
3193 only the name of the active queue.
3193 only the name of the active queue.
3194
3194
3195 To create a new queue, use -c/--create. The queue is automatically made
3195 To create a new queue, use -c/--create. The queue is automatically made
3196 active, except in the case where there are applied patches from the
3196 active, except in the case where there are applied patches from the
3197 currently active queue in the repository. Then the queue will only be
3197 currently active queue in the repository. Then the queue will only be
3198 created and switching will fail.
3198 created and switching will fail.
3199
3199
3200 To delete an existing queue, use --delete. You cannot delete the currently
3200 To delete an existing queue, use --delete. You cannot delete the currently
3201 active queue.
3201 active queue.
3202
3202
3203 Returns 0 on success.
3203 Returns 0 on success.
3204 '''
3204 '''
3205 q = repo.mq
3205 q = repo.mq
3206 _defaultqueue = 'patches'
3206 _defaultqueue = 'patches'
3207 _allqueues = 'patches.queues'
3207 _allqueues = 'patches.queues'
3208 _activequeue = 'patches.queue'
3208 _activequeue = 'patches.queue'
3209
3209
3210 def _getcurrent():
3210 def _getcurrent():
3211 cur = os.path.basename(q.path)
3211 cur = os.path.basename(q.path)
3212 if cur.startswith('patches-'):
3212 if cur.startswith('patches-'):
3213 cur = cur[8:]
3213 cur = cur[8:]
3214 return cur
3214 return cur
3215
3215
3216 def _noqueues():
3216 def _noqueues():
3217 try:
3217 try:
3218 fh = repo.opener(_allqueues, 'r')
3218 fh = repo.vfs(_allqueues, 'r')
3219 fh.close()
3219 fh.close()
3220 except IOError:
3220 except IOError:
3221 return True
3221 return True
3222
3222
3223 return False
3223 return False
3224
3224
3225 def _getqueues():
3225 def _getqueues():
3226 current = _getcurrent()
3226 current = _getcurrent()
3227
3227
3228 try:
3228 try:
3229 fh = repo.opener(_allqueues, 'r')
3229 fh = repo.vfs(_allqueues, 'r')
3230 queues = [queue.strip() for queue in fh if queue.strip()]
3230 queues = [queue.strip() for queue in fh if queue.strip()]
3231 fh.close()
3231 fh.close()
3232 if current not in queues:
3232 if current not in queues:
3233 queues.append(current)
3233 queues.append(current)
3234 except IOError:
3234 except IOError:
3235 queues = [_defaultqueue]
3235 queues = [_defaultqueue]
3236
3236
3237 return sorted(queues)
3237 return sorted(queues)
3238
3238
3239 def _setactive(name):
3239 def _setactive(name):
3240 if q.applied:
3240 if q.applied:
3241 raise util.Abort(_('new queue created, but cannot make active '
3241 raise util.Abort(_('new queue created, but cannot make active '
3242 'as patches are applied'))
3242 'as patches are applied'))
3243 _setactivenocheck(name)
3243 _setactivenocheck(name)
3244
3244
3245 def _setactivenocheck(name):
3245 def _setactivenocheck(name):
3246 fh = repo.opener(_activequeue, 'w')
3246 fh = repo.vfs(_activequeue, 'w')
3247 if name != 'patches':
3247 if name != 'patches':
3248 fh.write(name)
3248 fh.write(name)
3249 fh.close()
3249 fh.close()
3250
3250
3251 def _addqueue(name):
3251 def _addqueue(name):
3252 fh = repo.opener(_allqueues, 'a')
3252 fh = repo.vfs(_allqueues, 'a')
3253 fh.write('%s\n' % (name,))
3253 fh.write('%s\n' % (name,))
3254 fh.close()
3254 fh.close()
3255
3255
3256 def _queuedir(name):
3256 def _queuedir(name):
3257 if name == 'patches':
3257 if name == 'patches':
3258 return repo.join('patches')
3258 return repo.join('patches')
3259 else:
3259 else:
3260 return repo.join('patches-' + name)
3260 return repo.join('patches-' + name)
3261
3261
3262 def _validname(name):
3262 def _validname(name):
3263 for n in name:
3263 for n in name:
3264 if n in ':\\/.':
3264 if n in ':\\/.':
3265 return False
3265 return False
3266 return True
3266 return True
3267
3267
3268 def _delete(name):
3268 def _delete(name):
3269 if name not in existing:
3269 if name not in existing:
3270 raise util.Abort(_('cannot delete queue that does not exist'))
3270 raise util.Abort(_('cannot delete queue that does not exist'))
3271
3271
3272 current = _getcurrent()
3272 current = _getcurrent()
3273
3273
3274 if name == current:
3274 if name == current:
3275 raise util.Abort(_('cannot delete currently active queue'))
3275 raise util.Abort(_('cannot delete currently active queue'))
3276
3276
3277 fh = repo.opener('patches.queues.new', 'w')
3277 fh = repo.vfs('patches.queues.new', 'w')
3278 for queue in existing:
3278 for queue in existing:
3279 if queue == name:
3279 if queue == name:
3280 continue
3280 continue
3281 fh.write('%s\n' % (queue,))
3281 fh.write('%s\n' % (queue,))
3282 fh.close()
3282 fh.close()
3283 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3283 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3284
3284
3285 if not name or opts.get('list') or opts.get('active'):
3285 if not name or opts.get('list') or opts.get('active'):
3286 current = _getcurrent()
3286 current = _getcurrent()
3287 if opts.get('active'):
3287 if opts.get('active'):
3288 ui.write('%s\n' % (current,))
3288 ui.write('%s\n' % (current,))
3289 return
3289 return
3290 for queue in _getqueues():
3290 for queue in _getqueues():
3291 ui.write('%s' % (queue,))
3291 ui.write('%s' % (queue,))
3292 if queue == current and not ui.quiet:
3292 if queue == current and not ui.quiet:
3293 ui.write(_(' (active)\n'))
3293 ui.write(_(' (active)\n'))
3294 else:
3294 else:
3295 ui.write('\n')
3295 ui.write('\n')
3296 return
3296 return
3297
3297
3298 if not _validname(name):
3298 if not _validname(name):
3299 raise util.Abort(
3299 raise util.Abort(
3300 _('invalid queue name, may not contain the characters ":\\/."'))
3300 _('invalid queue name, may not contain the characters ":\\/."'))
3301
3301
3302 existing = _getqueues()
3302 existing = _getqueues()
3303
3303
3304 if opts.get('create'):
3304 if opts.get('create'):
3305 if name in existing:
3305 if name in existing:
3306 raise util.Abort(_('queue "%s" already exists') % name)
3306 raise util.Abort(_('queue "%s" already exists') % name)
3307 if _noqueues():
3307 if _noqueues():
3308 _addqueue(_defaultqueue)
3308 _addqueue(_defaultqueue)
3309 _addqueue(name)
3309 _addqueue(name)
3310 _setactive(name)
3310 _setactive(name)
3311 elif opts.get('rename'):
3311 elif opts.get('rename'):
3312 current = _getcurrent()
3312 current = _getcurrent()
3313 if name == current:
3313 if name == current:
3314 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3314 raise util.Abort(_('can\'t rename "%s" to its current name') % name)
3315 if name in existing:
3315 if name in existing:
3316 raise util.Abort(_('queue "%s" already exists') % name)
3316 raise util.Abort(_('queue "%s" already exists') % name)
3317
3317
3318 olddir = _queuedir(current)
3318 olddir = _queuedir(current)
3319 newdir = _queuedir(name)
3319 newdir = _queuedir(name)
3320
3320
3321 if os.path.exists(newdir):
3321 if os.path.exists(newdir):
3322 raise util.Abort(_('non-queue directory "%s" already exists') %
3322 raise util.Abort(_('non-queue directory "%s" already exists') %
3323 newdir)
3323 newdir)
3324
3324
3325 fh = repo.opener('patches.queues.new', 'w')
3325 fh = repo.vfs('patches.queues.new', 'w')
3326 for queue in existing:
3326 for queue in existing:
3327 if queue == current:
3327 if queue == current:
3328 fh.write('%s\n' % (name,))
3328 fh.write('%s\n' % (name,))
3329 if os.path.exists(olddir):
3329 if os.path.exists(olddir):
3330 util.rename(olddir, newdir)
3330 util.rename(olddir, newdir)
3331 else:
3331 else:
3332 fh.write('%s\n' % (queue,))
3332 fh.write('%s\n' % (queue,))
3333 fh.close()
3333 fh.close()
3334 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3334 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
3335 _setactivenocheck(name)
3335 _setactivenocheck(name)
3336 elif opts.get('delete'):
3336 elif opts.get('delete'):
3337 _delete(name)
3337 _delete(name)
3338 elif opts.get('purge'):
3338 elif opts.get('purge'):
3339 if name in existing:
3339 if name in existing:
3340 _delete(name)
3340 _delete(name)
3341 qdir = _queuedir(name)
3341 qdir = _queuedir(name)
3342 if os.path.exists(qdir):
3342 if os.path.exists(qdir):
3343 shutil.rmtree(qdir)
3343 shutil.rmtree(qdir)
3344 else:
3344 else:
3345 if name not in existing:
3345 if name not in existing:
3346 raise util.Abort(_('use --create to create a new queue'))
3346 raise util.Abort(_('use --create to create a new queue'))
3347 _setactive(name)
3347 _setactive(name)
3348
3348
3349 def mqphasedefaults(repo, roots):
3349 def mqphasedefaults(repo, roots):
3350 """callback used to set mq changeset as secret when no phase data exists"""
3350 """callback used to set mq changeset as secret when no phase data exists"""
3351 if repo.mq.applied:
3351 if repo.mq.applied:
3352 if repo.ui.configbool('mq', 'secret', False):
3352 if repo.ui.configbool('mq', 'secret', False):
3353 mqphase = phases.secret
3353 mqphase = phases.secret
3354 else:
3354 else:
3355 mqphase = phases.draft
3355 mqphase = phases.draft
3356 qbase = repo[repo.mq.applied[0].node]
3356 qbase = repo[repo.mq.applied[0].node]
3357 roots[mqphase].add(qbase.node())
3357 roots[mqphase].add(qbase.node())
3358 return roots
3358 return roots
3359
3359
3360 def reposetup(ui, repo):
3360 def reposetup(ui, repo):
3361 class mqrepo(repo.__class__):
3361 class mqrepo(repo.__class__):
3362 @localrepo.unfilteredpropertycache
3362 @localrepo.unfilteredpropertycache
3363 def mq(self):
3363 def mq(self):
3364 return queue(self.ui, self.baseui, self.path)
3364 return queue(self.ui, self.baseui, self.path)
3365
3365
3366 def invalidateall(self):
3366 def invalidateall(self):
3367 super(mqrepo, self).invalidateall()
3367 super(mqrepo, self).invalidateall()
3368 if localrepo.hasunfilteredcache(self, 'mq'):
3368 if localrepo.hasunfilteredcache(self, 'mq'):
3369 # recreate mq in case queue path was changed
3369 # recreate mq in case queue path was changed
3370 delattr(self.unfiltered(), 'mq')
3370 delattr(self.unfiltered(), 'mq')
3371
3371
3372 def abortifwdirpatched(self, errmsg, force=False):
3372 def abortifwdirpatched(self, errmsg, force=False):
3373 if self.mq.applied and self.mq.checkapplied and not force:
3373 if self.mq.applied and self.mq.checkapplied and not force:
3374 parents = self.dirstate.parents()
3374 parents = self.dirstate.parents()
3375 patches = [s.node for s in self.mq.applied]
3375 patches = [s.node for s in self.mq.applied]
3376 if parents[0] in patches or parents[1] in patches:
3376 if parents[0] in patches or parents[1] in patches:
3377 raise util.Abort(errmsg)
3377 raise util.Abort(errmsg)
3378
3378
3379 def commit(self, text="", user=None, date=None, match=None,
3379 def commit(self, text="", user=None, date=None, match=None,
3380 force=False, editor=False, extra={}):
3380 force=False, editor=False, extra={}):
3381 self.abortifwdirpatched(
3381 self.abortifwdirpatched(
3382 _('cannot commit over an applied mq patch'),
3382 _('cannot commit over an applied mq patch'),
3383 force)
3383 force)
3384
3384
3385 return super(mqrepo, self).commit(text, user, date, match, force,
3385 return super(mqrepo, self).commit(text, user, date, match, force,
3386 editor, extra)
3386 editor, extra)
3387
3387
3388 def checkpush(self, pushop):
3388 def checkpush(self, pushop):
3389 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3389 if self.mq.applied and self.mq.checkapplied and not pushop.force:
3390 outapplied = [e.node for e in self.mq.applied]
3390 outapplied = [e.node for e in self.mq.applied]
3391 if pushop.revs:
3391 if pushop.revs:
3392 # Assume applied patches have no non-patch descendants and
3392 # Assume applied patches have no non-patch descendants and
3393 # are not on remote already. Filtering any changeset not
3393 # are not on remote already. Filtering any changeset not
3394 # pushed.
3394 # pushed.
3395 heads = set(pushop.revs)
3395 heads = set(pushop.revs)
3396 for node in reversed(outapplied):
3396 for node in reversed(outapplied):
3397 if node in heads:
3397 if node in heads:
3398 break
3398 break
3399 else:
3399 else:
3400 outapplied.pop()
3400 outapplied.pop()
3401 # looking for pushed and shared changeset
3401 # looking for pushed and shared changeset
3402 for node in outapplied:
3402 for node in outapplied:
3403 if self[node].phase() < phases.secret:
3403 if self[node].phase() < phases.secret:
3404 raise util.Abort(_('source has mq patches applied'))
3404 raise util.Abort(_('source has mq patches applied'))
3405 # no non-secret patches pushed
3405 # no non-secret patches pushed
3406 super(mqrepo, self).checkpush(pushop)
3406 super(mqrepo, self).checkpush(pushop)
3407
3407
3408 def _findtags(self):
3408 def _findtags(self):
3409 '''augment tags from base class with patch tags'''
3409 '''augment tags from base class with patch tags'''
3410 result = super(mqrepo, self)._findtags()
3410 result = super(mqrepo, self)._findtags()
3411
3411
3412 q = self.mq
3412 q = self.mq
3413 if not q.applied:
3413 if not q.applied:
3414 return result
3414 return result
3415
3415
3416 mqtags = [(patch.node, patch.name) for patch in q.applied]
3416 mqtags = [(patch.node, patch.name) for patch in q.applied]
3417
3417
3418 try:
3418 try:
3419 # for now ignore filtering business
3419 # for now ignore filtering business
3420 self.unfiltered().changelog.rev(mqtags[-1][0])
3420 self.unfiltered().changelog.rev(mqtags[-1][0])
3421 except error.LookupError:
3421 except error.LookupError:
3422 self.ui.warn(_('mq status file refers to unknown node %s\n')
3422 self.ui.warn(_('mq status file refers to unknown node %s\n')
3423 % short(mqtags[-1][0]))
3423 % short(mqtags[-1][0]))
3424 return result
3424 return result
3425
3425
3426 # do not add fake tags for filtered revisions
3426 # do not add fake tags for filtered revisions
3427 included = self.changelog.hasnode
3427 included = self.changelog.hasnode
3428 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3428 mqtags = [mqt for mqt in mqtags if included(mqt[0])]
3429 if not mqtags:
3429 if not mqtags:
3430 return result
3430 return result
3431
3431
3432 mqtags.append((mqtags[-1][0], 'qtip'))
3432 mqtags.append((mqtags[-1][0], 'qtip'))
3433 mqtags.append((mqtags[0][0], 'qbase'))
3433 mqtags.append((mqtags[0][0], 'qbase'))
3434 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3434 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
3435 tags = result[0]
3435 tags = result[0]
3436 for patch in mqtags:
3436 for patch in mqtags:
3437 if patch[1] in tags:
3437 if patch[1] in tags:
3438 self.ui.warn(_('tag %s overrides mq patch of the same '
3438 self.ui.warn(_('tag %s overrides mq patch of the same '
3439 'name\n') % patch[1])
3439 'name\n') % patch[1])
3440 else:
3440 else:
3441 tags[patch[1]] = patch[0]
3441 tags[patch[1]] = patch[0]
3442
3442
3443 return result
3443 return result
3444
3444
3445 if repo.local():
3445 if repo.local():
3446 repo.__class__ = mqrepo
3446 repo.__class__ = mqrepo
3447
3447
3448 repo._phasedefaults.append(mqphasedefaults)
3448 repo._phasedefaults.append(mqphasedefaults)
3449
3449
3450 def mqimport(orig, ui, repo, *args, **kwargs):
3450 def mqimport(orig, ui, repo, *args, **kwargs):
3451 if (util.safehasattr(repo, 'abortifwdirpatched')
3451 if (util.safehasattr(repo, 'abortifwdirpatched')
3452 and not kwargs.get('no_commit', False)):
3452 and not kwargs.get('no_commit', False)):
3453 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3453 repo.abortifwdirpatched(_('cannot import over an applied patch'),
3454 kwargs.get('force'))
3454 kwargs.get('force'))
3455 return orig(ui, repo, *args, **kwargs)
3455 return orig(ui, repo, *args, **kwargs)
3456
3456
3457 def mqinit(orig, ui, *args, **kwargs):
3457 def mqinit(orig, ui, *args, **kwargs):
3458 mq = kwargs.pop('mq', None)
3458 mq = kwargs.pop('mq', None)
3459
3459
3460 if not mq:
3460 if not mq:
3461 return orig(ui, *args, **kwargs)
3461 return orig(ui, *args, **kwargs)
3462
3462
3463 if args:
3463 if args:
3464 repopath = args[0]
3464 repopath = args[0]
3465 if not hg.islocal(repopath):
3465 if not hg.islocal(repopath):
3466 raise util.Abort(_('only a local queue repository '
3466 raise util.Abort(_('only a local queue repository '
3467 'may be initialized'))
3467 'may be initialized'))
3468 else:
3468 else:
3469 repopath = cmdutil.findrepo(os.getcwd())
3469 repopath = cmdutil.findrepo(os.getcwd())
3470 if not repopath:
3470 if not repopath:
3471 raise util.Abort(_('there is no Mercurial repository here '
3471 raise util.Abort(_('there is no Mercurial repository here '
3472 '(.hg not found)'))
3472 '(.hg not found)'))
3473 repo = hg.repository(ui, repopath)
3473 repo = hg.repository(ui, repopath)
3474 return qinit(ui, repo, True)
3474 return qinit(ui, repo, True)
3475
3475
3476 def mqcommand(orig, ui, repo, *args, **kwargs):
3476 def mqcommand(orig, ui, repo, *args, **kwargs):
3477 """Add --mq option to operate on patch repository instead of main"""
3477 """Add --mq option to operate on patch repository instead of main"""
3478
3478
3479 # some commands do not like getting unknown options
3479 # some commands do not like getting unknown options
3480 mq = kwargs.pop('mq', None)
3480 mq = kwargs.pop('mq', None)
3481
3481
3482 if not mq:
3482 if not mq:
3483 return orig(ui, repo, *args, **kwargs)
3483 return orig(ui, repo, *args, **kwargs)
3484
3484
3485 q = repo.mq
3485 q = repo.mq
3486 r = q.qrepo()
3486 r = q.qrepo()
3487 if not r:
3487 if not r:
3488 raise util.Abort(_('no queue repository'))
3488 raise util.Abort(_('no queue repository'))
3489 return orig(r.ui, r, *args, **kwargs)
3489 return orig(r.ui, r, *args, **kwargs)
3490
3490
3491 def summaryhook(ui, repo):
3491 def summaryhook(ui, repo):
3492 q = repo.mq
3492 q = repo.mq
3493 m = []
3493 m = []
3494 a, u = len(q.applied), len(q.unapplied(repo))
3494 a, u = len(q.applied), len(q.unapplied(repo))
3495 if a:
3495 if a:
3496 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3496 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
3497 if u:
3497 if u:
3498 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3498 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
3499 if m:
3499 if m:
3500 # i18n: column positioning for "hg summary"
3500 # i18n: column positioning for "hg summary"
3501 ui.write(_("mq: %s\n") % ', '.join(m))
3501 ui.write(_("mq: %s\n") % ', '.join(m))
3502 else:
3502 else:
3503 # i18n: column positioning for "hg summary"
3503 # i18n: column positioning for "hg summary"
3504 ui.note(_("mq: (empty queue)\n"))
3504 ui.note(_("mq: (empty queue)\n"))
3505
3505
3506 def revsetmq(repo, subset, x):
3506 def revsetmq(repo, subset, x):
3507 """``mq()``
3507 """``mq()``
3508 Changesets managed by MQ.
3508 Changesets managed by MQ.
3509 """
3509 """
3510 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3510 revset.getargs(x, 0, 0, _("mq takes no arguments"))
3511 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3511 applied = set([repo[r.node].rev() for r in repo.mq.applied])
3512 return revset.baseset([r for r in subset if r in applied])
3512 return revset.baseset([r for r in subset if r in applied])
3513
3513
3514 # tell hggettext to extract docstrings from these functions:
3514 # tell hggettext to extract docstrings from these functions:
3515 i18nfunctions = [revsetmq]
3515 i18nfunctions = [revsetmq]
3516
3516
3517 def extsetup(ui):
3517 def extsetup(ui):
3518 # Ensure mq wrappers are called first, regardless of extension load order by
3518 # Ensure mq wrappers are called first, regardless of extension load order by
3519 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3519 # NOT wrapping in uisetup() and instead deferring to init stage two here.
3520 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3520 mqopt = [('', 'mq', None, _("operate on patch repository"))]
3521
3521
3522 extensions.wrapcommand(commands.table, 'import', mqimport)
3522 extensions.wrapcommand(commands.table, 'import', mqimport)
3523 cmdutil.summaryhooks.add('mq', summaryhook)
3523 cmdutil.summaryhooks.add('mq', summaryhook)
3524
3524
3525 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3525 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
3526 entry[1].extend(mqopt)
3526 entry[1].extend(mqopt)
3527
3527
3528 nowrap = set(commands.norepo.split(" "))
3528 nowrap = set(commands.norepo.split(" "))
3529
3529
3530 def dotable(cmdtable):
3530 def dotable(cmdtable):
3531 for cmd in cmdtable.keys():
3531 for cmd in cmdtable.keys():
3532 cmd = cmdutil.parsealiases(cmd)[0]
3532 cmd = cmdutil.parsealiases(cmd)[0]
3533 if cmd in nowrap:
3533 if cmd in nowrap:
3534 continue
3534 continue
3535 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3535 entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
3536 entry[1].extend(mqopt)
3536 entry[1].extend(mqopt)
3537
3537
3538 dotable(commands.table)
3538 dotable(commands.table)
3539
3539
3540 for extname, extmodule in extensions.extensions():
3540 for extname, extmodule in extensions.extensions():
3541 if extmodule.__file__ != __file__:
3541 if extmodule.__file__ != __file__:
3542 dotable(getattr(extmodule, 'cmdtable', {}))
3542 dotable(getattr(extmodule, 'cmdtable', {}))
3543
3543
3544 revset.symbols['mq'] = revsetmq
3544 revset.symbols['mq'] = revsetmq
3545
3545
3546 colortable = {'qguard.negative': 'red',
3546 colortable = {'qguard.negative': 'red',
3547 'qguard.positive': 'yellow',
3547 'qguard.positive': 'yellow',
3548 'qguard.unguarded': 'green',
3548 'qguard.unguarded': 'green',
3549 'qseries.applied': 'blue bold underline',
3549 'qseries.applied': 'blue bold underline',
3550 'qseries.guarded': 'black bold',
3550 'qseries.guarded': 'black bold',
3551 'qseries.missing': 'red bold',
3551 'qseries.missing': 'red bold',
3552 'qseries.unapplied': 'black bold'}
3552 'qseries.unapplied': 'black bold'}
@@ -1,653 +1,653 b''
1 # patchbomb.py - sending Mercurial changesets as patch emails
1 # patchbomb.py - sending Mercurial changesets as patch emails
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to send changesets as (a series of) patch emails
8 '''command to send changesets as (a series of) patch emails
9
9
10 The series is started off with a "[PATCH 0 of N]" introduction, which
10 The series is started off with a "[PATCH 0 of N]" introduction, which
11 describes the series as a whole.
11 describes the series as a whole.
12
12
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
13 Each patch email has a Subject line of "[PATCH M of N] ...", using the
14 first line of the changeset description as the subject text. The
14 first line of the changeset description as the subject text. The
15 message contains two or three body parts:
15 message contains two or three body parts:
16
16
17 - The changeset description.
17 - The changeset description.
18 - [Optional] The result of running diffstat on the patch.
18 - [Optional] The result of running diffstat on the patch.
19 - The patch itself, as generated by :hg:`export`.
19 - The patch itself, as generated by :hg:`export`.
20
20
21 Each message refers to the first in the series using the In-Reply-To
21 Each message refers to the first in the series using the In-Reply-To
22 and References headers, so they will show up as a sequence in threaded
22 and References headers, so they will show up as a sequence in threaded
23 mail and news readers, and in mail archives.
23 mail and news readers, and in mail archives.
24
24
25 To configure other defaults, add a section like this to your
25 To configure other defaults, add a section like this to your
26 configuration file::
26 configuration file::
27
27
28 [email]
28 [email]
29 from = My Name <my@email>
29 from = My Name <my@email>
30 to = recipient1, recipient2, ...
30 to = recipient1, recipient2, ...
31 cc = cc1, cc2, ...
31 cc = cc1, cc2, ...
32 bcc = bcc1, bcc2, ...
32 bcc = bcc1, bcc2, ...
33 reply-to = address1, address2, ...
33 reply-to = address1, address2, ...
34
34
35 Use ``[patchbomb]`` as configuration section name if you need to
35 Use ``[patchbomb]`` as configuration section name if you need to
36 override global ``[email]`` address settings.
36 override global ``[email]`` address settings.
37
37
38 Then you can use the :hg:`email` command to mail a series of
38 Then you can use the :hg:`email` command to mail a series of
39 changesets as a patchbomb.
39 changesets as a patchbomb.
40
40
41 You can also either configure the method option in the email section
41 You can also either configure the method option in the email section
42 to be a sendmail compatible mailer or fill out the [smtp] section so
42 to be a sendmail compatible mailer or fill out the [smtp] section so
43 that the patchbomb extension can automatically send patchbombs
43 that the patchbomb extension can automatically send patchbombs
44 directly from the commandline. See the [email] and [smtp] sections in
44 directly from the commandline. See the [email] and [smtp] sections in
45 hgrc(5) for details.
45 hgrc(5) for details.
46
46
47 You can control the default inclusion of an introduction message with the
47 You can control the default inclusion of an introduction message with the
48 ``patchbomb.intro`` configuration option. The configuration is always
48 ``patchbomb.intro`` configuration option. The configuration is always
49 overwritten by command line flags like --intro and --desc::
49 overwritten by command line flags like --intro and --desc::
50
50
51 [patchbomb]
51 [patchbomb]
52 intro=auto # include introduction message if more than 1 patch (default)
52 intro=auto # include introduction message if more than 1 patch (default)
53 intro=never # never include an introduction message
53 intro=never # never include an introduction message
54 intro=always # always include an introduction message
54 intro=always # always include an introduction message
55
55
56 You can set patchbomb to always ask for confirmation by setting
56 You can set patchbomb to always ask for confirmation by setting
57 ``patchbomb.confirm`` to true.
57 ``patchbomb.confirm`` to true.
58 '''
58 '''
59
59
60 import os, errno, socket, tempfile, cStringIO
60 import os, errno, socket, tempfile, cStringIO
61 import email
61 import email
62 # On python2.4 you have to import these by name or they fail to
62 # On python2.4 you have to import these by name or they fail to
63 # load. This was not a problem on Python 2.7.
63 # load. This was not a problem on Python 2.7.
64 import email.Generator
64 import email.Generator
65 import email.MIMEMultipart
65 import email.MIMEMultipart
66
66
67 from mercurial import cmdutil, commands, hg, mail, patch, util
67 from mercurial import cmdutil, commands, hg, mail, patch, util
68 from mercurial import scmutil
68 from mercurial import scmutil
69 from mercurial.i18n import _
69 from mercurial.i18n import _
70 from mercurial.node import bin
70 from mercurial.node import bin
71
71
72 cmdtable = {}
72 cmdtable = {}
73 command = cmdutil.command(cmdtable)
73 command = cmdutil.command(cmdtable)
74 testedwith = 'internal'
74 testedwith = 'internal'
75
75
76 def prompt(ui, prompt, default=None, rest=':'):
76 def prompt(ui, prompt, default=None, rest=':'):
77 if default:
77 if default:
78 prompt += ' [%s]' % default
78 prompt += ' [%s]' % default
79 return ui.prompt(prompt + rest, default)
79 return ui.prompt(prompt + rest, default)
80
80
81 def introwanted(ui, opts, number):
81 def introwanted(ui, opts, number):
82 '''is an introductory message apparently wanted?'''
82 '''is an introductory message apparently wanted?'''
83 introconfig = ui.config('patchbomb', 'intro', 'auto')
83 introconfig = ui.config('patchbomb', 'intro', 'auto')
84 if opts.get('intro') or opts.get('desc'):
84 if opts.get('intro') or opts.get('desc'):
85 intro = True
85 intro = True
86 elif introconfig == 'always':
86 elif introconfig == 'always':
87 intro = True
87 intro = True
88 elif introconfig == 'never':
88 elif introconfig == 'never':
89 intro = False
89 intro = False
90 elif introconfig == 'auto':
90 elif introconfig == 'auto':
91 intro = 1 < number
91 intro = 1 < number
92 else:
92 else:
93 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
93 ui.write_err(_('warning: invalid patchbomb.intro value "%s"\n')
94 % introconfig)
94 % introconfig)
95 ui.write_err(_('(should be one of always, never, auto)\n'))
95 ui.write_err(_('(should be one of always, never, auto)\n'))
96 intro = 1 < number
96 intro = 1 < number
97 return intro
97 return intro
98
98
99 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
99 def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, numbered,
100 patchname=None):
100 patchname=None):
101
101
102 desc = []
102 desc = []
103 node = None
103 node = None
104 body = ''
104 body = ''
105
105
106 for line in patchlines:
106 for line in patchlines:
107 if line.startswith('#'):
107 if line.startswith('#'):
108 if line.startswith('# Node ID'):
108 if line.startswith('# Node ID'):
109 node = line.split()[-1]
109 node = line.split()[-1]
110 continue
110 continue
111 if line.startswith('diff -r') or line.startswith('diff --git'):
111 if line.startswith('diff -r') or line.startswith('diff --git'):
112 break
112 break
113 desc.append(line)
113 desc.append(line)
114
114
115 if not patchname and not node:
115 if not patchname and not node:
116 raise ValueError
116 raise ValueError
117
117
118 if opts.get('attach') and not opts.get('body'):
118 if opts.get('attach') and not opts.get('body'):
119 body = ('\n'.join(desc[1:]).strip() or
119 body = ('\n'.join(desc[1:]).strip() or
120 'Patch subject is complete summary.')
120 'Patch subject is complete summary.')
121 body += '\n\n\n'
121 body += '\n\n\n'
122
122
123 if opts.get('plain'):
123 if opts.get('plain'):
124 while patchlines and patchlines[0].startswith('# '):
124 while patchlines and patchlines[0].startswith('# '):
125 patchlines.pop(0)
125 patchlines.pop(0)
126 if patchlines:
126 if patchlines:
127 patchlines.pop(0)
127 patchlines.pop(0)
128 while patchlines and not patchlines[0].strip():
128 while patchlines and not patchlines[0].strip():
129 patchlines.pop(0)
129 patchlines.pop(0)
130
130
131 ds = patch.diffstat(patchlines, git=opts.get('git'))
131 ds = patch.diffstat(patchlines, git=opts.get('git'))
132 if opts.get('diffstat'):
132 if opts.get('diffstat'):
133 body += ds + '\n\n'
133 body += ds + '\n\n'
134
134
135 addattachment = opts.get('attach') or opts.get('inline')
135 addattachment = opts.get('attach') or opts.get('inline')
136 if not addattachment or opts.get('body'):
136 if not addattachment or opts.get('body'):
137 body += '\n'.join(patchlines)
137 body += '\n'.join(patchlines)
138
138
139 if addattachment:
139 if addattachment:
140 msg = email.MIMEMultipart.MIMEMultipart()
140 msg = email.MIMEMultipart.MIMEMultipart()
141 if body:
141 if body:
142 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
142 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
143 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
143 p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch',
144 opts.get('test'))
144 opts.get('test'))
145 binnode = bin(node)
145 binnode = bin(node)
146 # if node is mq patch, it will have the patch file's name as a tag
146 # if node is mq patch, it will have the patch file's name as a tag
147 if not patchname:
147 if not patchname:
148 patchtags = [t for t in repo.nodetags(binnode)
148 patchtags = [t for t in repo.nodetags(binnode)
149 if t.endswith('.patch') or t.endswith('.diff')]
149 if t.endswith('.patch') or t.endswith('.diff')]
150 if patchtags:
150 if patchtags:
151 patchname = patchtags[0]
151 patchname = patchtags[0]
152 elif total > 1:
152 elif total > 1:
153 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
153 patchname = cmdutil.makefilename(repo, '%b-%n.patch',
154 binnode, seqno=idx,
154 binnode, seqno=idx,
155 total=total)
155 total=total)
156 else:
156 else:
157 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
157 patchname = cmdutil.makefilename(repo, '%b.patch', binnode)
158 disposition = 'inline'
158 disposition = 'inline'
159 if opts.get('attach'):
159 if opts.get('attach'):
160 disposition = 'attachment'
160 disposition = 'attachment'
161 p['Content-Disposition'] = disposition + '; filename=' + patchname
161 p['Content-Disposition'] = disposition + '; filename=' + patchname
162 msg.attach(p)
162 msg.attach(p)
163 else:
163 else:
164 msg = mail.mimetextpatch(body, display=opts.get('test'))
164 msg = mail.mimetextpatch(body, display=opts.get('test'))
165
165
166 flag = ' '.join(opts.get('flag'))
166 flag = ' '.join(opts.get('flag'))
167 if flag:
167 if flag:
168 flag = ' ' + flag
168 flag = ' ' + flag
169
169
170 subj = desc[0].strip().rstrip('. ')
170 subj = desc[0].strip().rstrip('. ')
171 if not numbered:
171 if not numbered:
172 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
172 subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
173 else:
173 else:
174 tlen = len(str(total))
174 tlen = len(str(total))
175 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
175 subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
176 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
176 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
177 msg['X-Mercurial-Node'] = node
177 msg['X-Mercurial-Node'] = node
178 msg['X-Mercurial-Series-Index'] = '%i' % idx
178 msg['X-Mercurial-Series-Index'] = '%i' % idx
179 msg['X-Mercurial-Series-Total'] = '%i' % total
179 msg['X-Mercurial-Series-Total'] = '%i' % total
180 return msg, subj, ds
180 return msg, subj, ds
181
181
182 def _getpatches(repo, revs, **opts):
182 def _getpatches(repo, revs, **opts):
183 """return a list of patches for a list of revisions
183 """return a list of patches for a list of revisions
184
184
185 Each patch in the list is itself a list of lines.
185 Each patch in the list is itself a list of lines.
186 """
186 """
187 ui = repo.ui
187 ui = repo.ui
188 prev = repo['.'].rev()
188 prev = repo['.'].rev()
189 for r in scmutil.revrange(repo, revs):
189 for r in scmutil.revrange(repo, revs):
190 if r == prev and (repo[None].files() or repo[None].deleted()):
190 if r == prev and (repo[None].files() or repo[None].deleted()):
191 ui.warn(_('warning: working directory has '
191 ui.warn(_('warning: working directory has '
192 'uncommitted changes\n'))
192 'uncommitted changes\n'))
193 output = cStringIO.StringIO()
193 output = cStringIO.StringIO()
194 cmdutil.export(repo, [r], fp=output,
194 cmdutil.export(repo, [r], fp=output,
195 opts=patch.difffeatureopts(ui, opts, git=True))
195 opts=patch.difffeatureopts(ui, opts, git=True))
196 yield output.getvalue().split('\n')
196 yield output.getvalue().split('\n')
197 def _getbundle(repo, dest, **opts):
197 def _getbundle(repo, dest, **opts):
198 """return a bundle containing changesets missing in "dest"
198 """return a bundle containing changesets missing in "dest"
199
199
200 The `opts` keyword-arguments are the same as the one accepted by the
200 The `opts` keyword-arguments are the same as the one accepted by the
201 `bundle` command.
201 `bundle` command.
202
202
203 The bundle is a returned as a single in-memory binary blob.
203 The bundle is a returned as a single in-memory binary blob.
204 """
204 """
205 ui = repo.ui
205 ui = repo.ui
206 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
206 tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
207 tmpfn = os.path.join(tmpdir, 'bundle')
207 tmpfn = os.path.join(tmpdir, 'bundle')
208 try:
208 try:
209 commands.bundle(ui, repo, tmpfn, dest, **opts)
209 commands.bundle(ui, repo, tmpfn, dest, **opts)
210 fp = open(tmpfn, 'rb')
210 fp = open(tmpfn, 'rb')
211 data = fp.read()
211 data = fp.read()
212 fp.close()
212 fp.close()
213 return data
213 return data
214 finally:
214 finally:
215 try:
215 try:
216 os.unlink(tmpfn)
216 os.unlink(tmpfn)
217 except OSError:
217 except OSError:
218 pass
218 pass
219 os.rmdir(tmpdir)
219 os.rmdir(tmpdir)
220
220
221 def _getdescription(repo, defaultbody, sender, **opts):
221 def _getdescription(repo, defaultbody, sender, **opts):
222 """obtain the body of the introduction message and return it
222 """obtain the body of the introduction message and return it
223
223
224 This is also used for the body of email with an attached bundle.
224 This is also used for the body of email with an attached bundle.
225
225
226 The body can be obtained either from the command line option or entered by
226 The body can be obtained either from the command line option or entered by
227 the user through the editor.
227 the user through the editor.
228 """
228 """
229 ui = repo.ui
229 ui = repo.ui
230 if opts.get('desc'):
230 if opts.get('desc'):
231 body = open(opts.get('desc')).read()
231 body = open(opts.get('desc')).read()
232 else:
232 else:
233 ui.write(_('\nWrite the introductory message for the '
233 ui.write(_('\nWrite the introductory message for the '
234 'patch series.\n\n'))
234 'patch series.\n\n'))
235 body = ui.edit(defaultbody, sender)
235 body = ui.edit(defaultbody, sender)
236 # Save series description in case sendmail fails
236 # Save series description in case sendmail fails
237 msgfile = repo.opener('last-email.txt', 'wb')
237 msgfile = repo.vfs('last-email.txt', 'wb')
238 msgfile.write(body)
238 msgfile.write(body)
239 msgfile.close()
239 msgfile.close()
240 return body
240 return body
241
241
242 def _getbundlemsgs(repo, sender, bundle, **opts):
242 def _getbundlemsgs(repo, sender, bundle, **opts):
243 """Get the full email for sending a given bundle
243 """Get the full email for sending a given bundle
244
244
245 This function returns a list of "email" tuples (subject, content, None).
245 This function returns a list of "email" tuples (subject, content, None).
246 The list is always one message long in that case.
246 The list is always one message long in that case.
247 """
247 """
248 ui = repo.ui
248 ui = repo.ui
249 _charsets = mail._charsets(ui)
249 _charsets = mail._charsets(ui)
250 subj = (opts.get('subject')
250 subj = (opts.get('subject')
251 or prompt(ui, 'Subject:', 'A bundle for your repository'))
251 or prompt(ui, 'Subject:', 'A bundle for your repository'))
252
252
253 body = _getdescription(repo, '', sender, **opts)
253 body = _getdescription(repo, '', sender, **opts)
254 msg = email.MIMEMultipart.MIMEMultipart()
254 msg = email.MIMEMultipart.MIMEMultipart()
255 if body:
255 if body:
256 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
256 msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
257 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
257 datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
258 datapart.set_payload(bundle)
258 datapart.set_payload(bundle)
259 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
259 bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
260 datapart.add_header('Content-Disposition', 'attachment',
260 datapart.add_header('Content-Disposition', 'attachment',
261 filename=bundlename)
261 filename=bundlename)
262 email.Encoders.encode_base64(datapart)
262 email.Encoders.encode_base64(datapart)
263 msg.attach(datapart)
263 msg.attach(datapart)
264 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
264 msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
265 return [(msg, subj, None)]
265 return [(msg, subj, None)]
266
266
267 def _makeintro(repo, sender, patches, **opts):
267 def _makeintro(repo, sender, patches, **opts):
268 """make an introduction email, asking the user for content if needed
268 """make an introduction email, asking the user for content if needed
269
269
270 email is returned as (subject, body, cumulative-diffstat)"""
270 email is returned as (subject, body, cumulative-diffstat)"""
271 ui = repo.ui
271 ui = repo.ui
272 _charsets = mail._charsets(ui)
272 _charsets = mail._charsets(ui)
273 tlen = len(str(len(patches)))
273 tlen = len(str(len(patches)))
274
274
275 flag = opts.get('flag') or ''
275 flag = opts.get('flag') or ''
276 if flag:
276 if flag:
277 flag = ' ' + ' '.join(flag)
277 flag = ' ' + ' '.join(flag)
278 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
278 prefix = '[PATCH %0*d of %d%s]' % (tlen, 0, len(patches), flag)
279
279
280 subj = (opts.get('subject') or
280 subj = (opts.get('subject') or
281 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
281 prompt(ui, '(optional) Subject: ', rest=prefix, default=''))
282 if not subj:
282 if not subj:
283 return None # skip intro if the user doesn't bother
283 return None # skip intro if the user doesn't bother
284
284
285 subj = prefix + ' ' + subj
285 subj = prefix + ' ' + subj
286
286
287 body = ''
287 body = ''
288 if opts.get('diffstat'):
288 if opts.get('diffstat'):
289 # generate a cumulative diffstat of the whole patch series
289 # generate a cumulative diffstat of the whole patch series
290 diffstat = patch.diffstat(sum(patches, []))
290 diffstat = patch.diffstat(sum(patches, []))
291 body = '\n' + diffstat
291 body = '\n' + diffstat
292 else:
292 else:
293 diffstat = None
293 diffstat = None
294
294
295 body = _getdescription(repo, body, sender, **opts)
295 body = _getdescription(repo, body, sender, **opts)
296 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
296 msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
297 msg['Subject'] = mail.headencode(ui, subj, _charsets,
297 msg['Subject'] = mail.headencode(ui, subj, _charsets,
298 opts.get('test'))
298 opts.get('test'))
299 return (msg, subj, diffstat)
299 return (msg, subj, diffstat)
300
300
def _getpatchmsgs(repo, sender, patches, patchnames=None, **opts):
    """return a list of emails from a list of patches

    This involves introduction message creation if necessary.

    This function returns a list of "email" tuples (subject, content, None).
    """
    ui = repo.ui
    _charsets = mail._charsets(ui)

    ui.write(_('this patch series consists of %d patches.\n\n')
             % len(patches))

    # Optional introduction message; only built when the user wants one.
    messages = []
    if introwanted(ui, opts, len(patches)):
        intro = _makeintro(repo, sender, patches, **opts)
        if intro:
            messages.append(intro)

    # Subjects get "[PATCH M of N]" numbering only for multi-mail series.
    numbered = len(messages) + len(patches) > 1

    # One mail per patch, in series order.
    total = len(patches)
    for idx, patch in enumerate(patches):
        label = patchnames[idx] if patchnames else None
        messages.append(makepatch(ui, repo, patch, opts, _charsets,
                                  idx + 1, total, numbered, label))

    return messages
334
334
def _getoutgoing(repo, dest, revs):
    '''Return the revisions present locally but not in dest'''
    ui = repo.ui
    path = ui.expandpath(dest or 'default-push', dest or 'default')
    path = hg.parseurl(path)[0]
    ui.status(_('comparing with %s\n') % util.hidepassword(path))

    # Drop negative (nonexistent) revisions; fall back to the last rev.
    candidates = [r for r in scmutil.revrange(repo, revs) if r >= 0]
    if not candidates:
        candidates = [len(repo) - 1]
    outgoing = repo.revs('outgoing(%s) and ::%ld', dest or '', candidates)
    if not outgoing:
        ui.status(_("no changes found\n"))
        return []
    return [str(r) for r in outgoing]
350
350
# Options shared by every patchbomb-style command (see the `email' command
# below); kept module-level so extensions can reuse them.
emailopts = [
    ('', 'body', None, _('send patches as inline message text (default)')),
    ('a', 'attach', None, _('send patches as attachments')),
    ('i', 'inline', None, _('send patches as inline attachments')),
    ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
    ('c', 'cc', [], _('email addresses of copy recipients')),
    ('', 'confirm', None, _('ask for confirmation before sending')),
    ('d', 'diffstat', None, _('add diffstat output to messages')),
    ('', 'date', '', _('use the given date as the sending date')),
    ('', 'desc', '', _('use the given file as the series description')),
    ('f', 'from', '', _('email address of sender')),
    ('n', 'test', None, _('print messages that would be sent')),
    ('m', 'mbox', '', _('write messages to mbox file instead of sending them')),
    ('', 'reply-to', [], _('email addresses replies should be sent to')),
    ('s', 'subject', '', _('subject of first message (intro or single patch)')),
    ('', 'in-reply-to', '', _('message identifier to reply to')),
    ('', 'flag', [], _('flags to add in subject prefixes')),
    ('t', 'to', [], _('email addresses of recipients'))]
369
369
@command('email',
    [('g', 'git', None, _('use git extended diff format')),
    ('', 'plain', None, _('omit hg patch header')),
    ('o', 'outgoing', None,
     _('send changes not found in the target repository')),
    ('b', 'bundle', None, _('send changes not in target as a binary bundle')),
    ('', 'bundlename', 'bundle',
     _('name of the bundle attachment file'), _('NAME')),
    ('r', 'rev', [], _('a revision to send'), _('REV')),
    ('', 'force', None, _('run even when remote repository is unrelated '
       '(with -b/--bundle)')),
    ('', 'base', [], _('a base changeset to specify instead of a destination '
       '(with -b/--bundle)'), _('REV')),
    ('', 'intro', None, _('send an introduction email for a single patch')),
    ] + emailopts + commands.remoteopts,
    _('hg email [OPTION]... [DEST]...'))
def patchbomb(ui, repo, *revs, **opts):
    '''send changesets by email

    By default, diffs are sent in the format generated by
    :hg:`export`, one per message. The series starts with a "[PATCH 0
    of N]" introduction, which describes the series as a whole.

    Each patch email has a Subject line of "[PATCH M of N] ...", using
    the first line of the changeset description as the subject text.
    The message contains two or three parts. First, the changeset
    description.

    With the -d/--diffstat option, if the diffstat program is
    installed, the result of running diffstat on the patch is inserted.

    Finally, the patch itself, as generated by :hg:`export`.

    With the -d/--diffstat or --confirm options, you will be presented
    with a final summary of all messages and asked for confirmation before
    the messages are sent.

    By default the patch is included as text in the email body for
    easy reviewing. Using the -a/--attach option will instead create
    an attachment for the patch. With -i/--inline an inline attachment
    will be created. You can include a patch both as text in the email
    body and as a regular or an inline attachment by combining the
    -a/--attach or -i/--inline with the --body option.

    With -o/--outgoing, emails will be generated for patches not found
    in the destination repository (or only those which are ancestors
    of the specified revisions if any are provided)

    With -b/--bundle, changesets are selected as for --outgoing, but a
    single email containing a binary Mercurial bundle as an attachment
    will be sent.

    With -m/--mbox, instead of previewing each patchbomb message in a
    pager or sending the messages directly, it will create a UNIX
    mailbox file with the patch emails. This mailbox file can be
    previewed with any mail user agent which supports UNIX mbox
    files.

    With -n/--test, all steps will run, but mail will not be sent.
    You will be prompted for an email recipient address, a subject and
    an introductory message describing the patches of your patchbomb.
    Then when all is done, patchbomb messages are displayed. If the
    PAGER environment variable is set, your pager will be fired up once
    for each patchbomb message, so you can verify everything is alright.

    In case email sending fails, you will find a backup of your series
    introductory message in ``.hg/last-email.txt``.

    The default behavior of this command can be customized through
    configuration. (See :hg:`help patchbomb` for details)

    Examples::

      hg email -r 3000          # send patch 3000 only
      hg email -r 3000 -r 3001  # send patches 3000 and 3001
      hg email -r 3000:3005     # send patches 3000 through 3005
      hg email 3000             # send patch 3000 (deprecated)

      hg email -o               # send all patches not in default
      hg email -o DEST          # send all patches not in DEST
      hg email -o -r 3000       # send all ancestors of 3000 not in default
      hg email -o -r 3000 DEST  # send all ancestors of 3000 not in DEST

      hg email -b               # send bundle of all patches not in default
      hg email -b DEST          # send bundle of all patches not in DEST
      hg email -b -r 3000       # bundle of all ancestors of 3000 not in default
      hg email -b -r 3000 DEST  # bundle of all ancestors of 3000 not in DEST

      hg email -o -m mbox &&    # generate an mbox file...
        mutt -R -f mbox         # ... and view it with mutt
      hg email -o -m mbox &&    # generate an mbox file ...
        formail -s sendmail \\   # ... and use formail to send from the mbox
          -bm -t < mbox         # ... using sendmail

    Before using this command, you will need to enable email in your
    hgrc. See the [email] section in hgrc(5) for details.
    '''

    _charsets = mail._charsets(ui)

    bundle = opts.get('bundle')
    date = opts.get('date')
    mbox = opts.get('mbox')
    outgoing = opts.get('outgoing')
    rev = opts.get('rev')
    # internal option used by pbranches
    patches = opts.get('patches')

    if not (opts.get('test') or mbox):
        # really sending
        mail.validateconfig(ui)

    if not (revs or rev or outgoing or bundle or patches):
        raise util.Abort(_('specify at least one changeset with -r or -o'))

    if outgoing and bundle:
        raise util.Abort(_("--outgoing mode always on with --bundle;"
                           " do not re-specify --outgoing"))

    if outgoing or bundle:
        if len(revs) > 1:
            raise util.Abort(_("too many destinations"))
        dest = revs and revs[0] or None
        revs = []

    if rev:
        if revs:
            raise util.Abort(_('use only one form to specify the revision'))
        revs = rev

    if outgoing:
        revs = _getoutgoing(repo, dest, rev)
    if bundle:
        opts['revs'] = revs

    # start
    if date:
        start_time = util.parsedate(date)
    else:
        start_time = util.makedate()

    def genmsgid(id):
        return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())

    sender = (opts.get('from') or ui.config('email', 'from') or
              ui.config('patchbomb', 'from') or
              prompt(ui, 'From', ui.username()))

    if patches:
        msgs = _getpatchmsgs(repo, sender, patches, opts.get('patchnames'),
                             **opts)
    elif bundle:
        bundledata = _getbundle(repo, dest, **opts)
        bundleopts = opts.copy()
        bundleopts.pop('bundle', None)  # already processed
        msgs = _getbundlemsgs(repo, sender, bundledata, **bundleopts)
    else:
        _patches = list(_getpatches(repo, revs, **opts))
        msgs = _getpatchmsgs(repo, sender, _patches, **opts)

    showaddrs = []

    def getaddrs(header, ask=False, default=None):
        configkey = header.lower()
        opt = header.replace('-', '_').lower()
        addrs = opts.get(opt)
        if addrs:
            showaddrs.append('%s: %s' % (header, ', '.join(addrs)))
            return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))

        # not on the command line: fallback to config and then maybe ask
        addr = (ui.config('email', configkey) or
                ui.config('patchbomb', configkey) or
                '')
        if not addr and ask:
            addr = prompt(ui, header, default=default)
        if addr:
            showaddrs.append('%s: %s' % (header, addr))
            return mail.addrlistencode(ui, [addr], _charsets, opts.get('test'))
        else:
            return default

    to = getaddrs('To', ask=True)
    if not to:
        # we can get here in non-interactive mode
        raise util.Abort(_('no recipient addresses provided'))
    cc = getaddrs('Cc', ask=True, default='') or []
    bcc = getaddrs('Bcc') or []
    replyto = getaddrs('Reply-To')

    confirm = ui.configbool('patchbomb', 'confirm')
    confirm |= bool(opts.get('diffstat') or opts.get('confirm'))

    if confirm:
        ui.write(_('\nFinal summary:\n\n'), label='patchbomb.finalsummary')
        ui.write(('From: %s\n' % sender), label='patchbomb.from')
        for addr in showaddrs:
            ui.write('%s\n' % addr, label='patchbomb.to')
        for m, subj, ds in msgs:
            ui.write(('Subject: %s\n' % subj), label='patchbomb.subject')
            if ds:
                ui.write(ds, label='patchbomb.diffstats')
        ui.write('\n')
        if ui.promptchoice(_('are you sure you want to send (yn)?'
                             '$$ &Yes $$ &No')):
            raise util.Abort(_('patchbomb canceled'))

    ui.write('\n')

    parent = opts.get('in_reply_to') or None
    # angle brackets may be omitted, they're not semantically part of the msg-id
    if parent is not None:
        if not parent.startswith('<'):
            parent = '<' + parent
        if not parent.endswith('>'):
            parent += '>'

    sender_addr = email.Utils.parseaddr(sender)[1]
    sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
    sendmail = None
    firstpatch = None
    for i, (m, subj, ds) in enumerate(msgs):
        try:
            m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
            if not firstpatch:
                firstpatch = m['Message-Id']
            m['X-Mercurial-Series-Id'] = firstpatch
        except TypeError:
            m['Message-Id'] = genmsgid('patchbomb')
        if parent:
            m['In-Reply-To'] = parent
            m['References'] = parent
        if not parent or 'X-Mercurial-Node' not in m:
            parent = m['Message-Id']

        m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
        m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)

        start_time = (start_time[0] + 1, start_time[1])
        m['From'] = sender
        m['To'] = ', '.join(to)
        if cc:
            m['Cc'] = ', '.join(cc)
        if bcc:
            m['Bcc'] = ', '.join(bcc)
        if replyto:
            m['Reply-To'] = ', '.join(replyto)
        if opts.get('test'):
            ui.status(_('displaying '), subj, ' ...\n')
            ui.flush()
            if 'PAGER' in os.environ and not ui.plain():
                fp = util.popen(os.environ['PAGER'], 'w')
            else:
                fp = ui
            generator = email.Generator.Generator(fp, mangle_from_=False)
            try:
                generator.flatten(m, 0)
                fp.write('\n')
            except IOError as inst:
                if inst.errno != errno.EPIPE:
                    raise
            if fp is not ui:
                fp.close()
        else:
            if not sendmail:
                verifycert = ui.config('smtp', 'verifycert')
                if opts.get('insecure'):
                    ui.setconfig('smtp', 'verifycert', 'loose', 'patchbomb')
                try:
                    sendmail = mail.connect(ui, mbox=mbox)
                finally:
                    ui.setconfig('smtp', 'verifycert', verifycert, 'patchbomb')
            ui.status(_('sending '), subj, ' ...\n')
            ui.progress(_('sending'), i, item=subj, total=len(msgs))
            if not mbox:
                # Exim does not remove the Bcc field
                del m['Bcc']
            fp = cStringIO.StringIO()
            generator = email.Generator.Generator(fp, mangle_from_=False)
            generator.flatten(m, 0)
            sendmail(sender_addr, to + bcc + cc, fp.getvalue())

    ui.progress(_('writing'), None)
    ui.progress(_('sending'), None)
@@ -1,1074 +1,1074 b''
1 # rebase.py - rebasing feature for mercurial
1 # rebase.py - rebasing feature for mercurial
2 #
2 #
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
3 # Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''command to move sets of revisions to a different ancestor
8 '''command to move sets of revisions to a different ancestor
9
9
10 This extension lets you rebase changesets in an existing Mercurial
10 This extension lets you rebase changesets in an existing Mercurial
11 repository.
11 repository.
12
12
13 For more information:
13 For more information:
14 http://mercurial.selenic.com/wiki/RebaseExtension
14 http://mercurial.selenic.com/wiki/RebaseExtension
15 '''
15 '''
16
16
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
17 from mercurial import hg, util, repair, merge, cmdutil, commands, bookmarks
18 from mercurial import extensions, patch, scmutil, phases, obsolete, error
18 from mercurial import extensions, patch, scmutil, phases, obsolete, error
19 from mercurial import copies
19 from mercurial import copies
20 from mercurial.commands import templateopts
20 from mercurial.commands import templateopts
21 from mercurial.node import nullrev, nullid, hex, short
21 from mercurial.node import nullrev, nullid, hex, short
22 from mercurial.lock import release
22 from mercurial.lock import release
23 from mercurial.i18n import _
23 from mercurial.i18n import _
24 import os, errno
24 import os, errno
25
25
# Sentinel state values used in the rebase state map; all are negative so
# they can never collide with a real revision number.
revtodo = -1
nullmerge = -2
revignored = -3

cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
33
33
34 def _savegraft(ctx, extra):
34 def _savegraft(ctx, extra):
35 s = ctx.extra().get('source', None)
35 s = ctx.extra().get('source', None)
36 if s is not None:
36 if s is not None:
37 extra['source'] = s
37 extra['source'] = s
38
38
39 def _savebranch(ctx, extra):
39 def _savebranch(ctx, extra):
40 extra['branch'] = ctx.branch()
40 extra['branch'] = ctx.branch()
41
41
42 def _makeextrafn(copiers):
42 def _makeextrafn(copiers):
43 """make an extrafn out of the given copy-functions.
43 """make an extrafn out of the given copy-functions.
44
44
45 A copy function takes a context and an extra dict, and mutates the
45 A copy function takes a context and an extra dict, and mutates the
46 extra dict as needed based on the given context.
46 extra dict as needed based on the given context.
47 """
47 """
48 def extrafn(ctx, extra):
48 def extrafn(ctx, extra):
49 for c in copiers:
49 for c in copiers:
50 c(ctx, extra)
50 c(ctx, extra)
51 return extrafn
51 return extrafn
52
52
53 @command('rebase',
53 @command('rebase',
54 [('s', 'source', '',
54 [('s', 'source', '',
55 _('rebase the specified changeset and descendants'), _('REV')),
55 _('rebase the specified changeset and descendants'), _('REV')),
56 ('b', 'base', '',
56 ('b', 'base', '',
57 _('rebase everything from branching point of specified changeset'),
57 _('rebase everything from branching point of specified changeset'),
58 _('REV')),
58 _('REV')),
59 ('r', 'rev', [],
59 ('r', 'rev', [],
60 _('rebase these revisions'),
60 _('rebase these revisions'),
61 _('REV')),
61 _('REV')),
62 ('d', 'dest', '',
62 ('d', 'dest', '',
63 _('rebase onto the specified changeset'), _('REV')),
63 _('rebase onto the specified changeset'), _('REV')),
64 ('', 'collapse', False, _('collapse the rebased changesets')),
64 ('', 'collapse', False, _('collapse the rebased changesets')),
65 ('m', 'message', '',
65 ('m', 'message', '',
66 _('use text as collapse commit message'), _('TEXT')),
66 _('use text as collapse commit message'), _('TEXT')),
67 ('e', 'edit', False, _('invoke editor on commit messages')),
67 ('e', 'edit', False, _('invoke editor on commit messages')),
68 ('l', 'logfile', '',
68 ('l', 'logfile', '',
69 _('read collapse commit message from file'), _('FILE')),
69 _('read collapse commit message from file'), _('FILE')),
70 ('', 'keep', False, _('keep original changesets')),
70 ('', 'keep', False, _('keep original changesets')),
71 ('', 'keepbranches', False, _('keep original branch names')),
71 ('', 'keepbranches', False, _('keep original branch names')),
72 ('D', 'detach', False, _('(DEPRECATED)')),
72 ('D', 'detach', False, _('(DEPRECATED)')),
73 ('i', 'interactive', False, _('(DEPRECATED)')),
73 ('i', 'interactive', False, _('(DEPRECATED)')),
74 ('t', 'tool', '', _('specify merge tool')),
74 ('t', 'tool', '', _('specify merge tool')),
75 ('c', 'continue', False, _('continue an interrupted rebase')),
75 ('c', 'continue', False, _('continue an interrupted rebase')),
76 ('a', 'abort', False, _('abort an interrupted rebase'))] +
76 ('a', 'abort', False, _('abort an interrupted rebase'))] +
77 templateopts,
77 templateopts,
78 _('[-s REV | -b REV] [-d REV] [OPTION]'))
78 _('[-s REV | -b REV] [-d REV] [OPTION]'))
def rebase(ui, repo, **opts):
    """move changeset (and descendants) to a different branch

    Rebase uses repeated merging to graft changesets from one part of
    history (the source) onto another (the destination). This can be
    useful for linearizing *local* changes relative to a master
    development tree.

    You should not rebase changesets that have already been shared
    with others. Doing so will force everybody else to perform the
    same rebase or they will end up with duplicated changesets after
    pulling in your rebased changesets.

    In its default configuration, Mercurial will prevent you from
    rebasing published changes. See :hg:`help phases` for details.

    If you don't specify a destination changeset (``-d/--dest``),
    rebase uses the current branch tip as the destination. (The
    destination changeset is not modified by rebasing, but new
    changesets are added as its descendants.)

    You can specify which changesets to rebase in two ways: as a
    "source" changeset or as a "base" changeset. Both are shorthand
    for a topologically related set of changesets (the "source
    branch"). If you specify source (``-s/--source``), rebase will
    rebase that changeset and all of its descendants onto dest. If you
    specify base (``-b/--base``), rebase will select ancestors of base
    back to but not including the common ancestor with dest. Thus,
    ``-b`` is less precise but more convenient than ``-s``: you can
    specify any changeset in the source branch, and rebase will select
    the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
    uses the parent of the working directory as the base.

    For advanced usage, a third way is available through the ``--rev``
    option. It allows you to specify an arbitrary set of changesets to
    rebase. Descendants of revs you specify with this option are not
    automatically included in the rebase.

    By default, rebase recreates the changesets in the source branch
    as descendants of dest and then destroys the originals. Use
    ``--keep`` to preserve the original source changesets. Some
    changesets in the source branch (e.g. merges from the destination
    branch) may be dropped if they no longer contribute any change.

    One result of the rules for selecting the destination changeset
    and source branch is that, unlike ``merge``, rebase will do
    nothing if you are at the branch tip of a named branch
    with two heads. You need to explicitly specify source and/or
    destination (or ``update`` to the other head, if it's the head of
    the intended source branch).

    If a rebase is interrupted to manually resolve a merge, it can be
    continued with --continue/-c or aborted with --abort/-a.

    .. container:: verbose

      Examples:

      - move "local changes" (current commit back to branching point)
        to the current branch tip after a pull::

          hg rebase

      - move a single changeset to the stable branch::

          hg rebase -r 5f493448 -d stable

      - splice a commit and all its descendants onto another part of history::

          hg rebase --source c0c3 --dest 4cf9

      - rebase everything on a branch marked by a bookmark onto the
        default branch::

          hg rebase --base myfeature --dest default

      - collapse a sequence of changes into a single commit::

          hg rebase --collapse -r 1520:1525 -d .

      - move a named branch while preserving its name::

          hg rebase -r "branch(featureX)" -d 1.3 --keepbranches

    Returns 0 on success, 1 if nothing to rebase or there are
    unresolved conflicts.

    """
    # Run-wide state.  These same nine values are what storestatus()
    # persists and restorestatus() reloads, so an interrupted rebase can be
    # resumed with --continue or undone with --abort.
    originalwd = target = None
    activebookmark = None
    external = nullrev
    state = {}          # maps old rev -> new rev (or revtodo/nullmerge/revignored markers)
    skipped = set()     # revs that produced no changes and were dropped
    targetancestors = set()


    lock = wlock = None
    try:
        # Both working-dir and store locks are held for the whole operation.
        wlock = repo.wlock()
        lock = repo.lock()

        # Validate input and define rebasing points
        destf = opts.get('dest', None)
        srcf = opts.get('source', None)
        basef = opts.get('base', None)
        revf = opts.get('rev', [])
        contf = opts.get('continue')
        abortf = opts.get('abort')
        collapsef = opts.get('collapse', False)
        collapsemsg = cmdutil.logmessage(ui, opts)
        e = opts.get('extrafn') # internal, used by e.g. hgsubversion
        extrafns = [_savegraft]
        if e:
            extrafns = [e]
        keepf = opts.get('keep', False)
        keepbranchesf = opts.get('keepbranches', False)
        # keepopen is not meant for use on the command line, but by
        # other extensions
        keepopen = opts.get('keepopen', False)

        if opts.get('interactive'):
            msg = _("interactive history editing is supported by the "
                    "'histedit' extension (see 'hg help histedit')")
            raise util.Abort(msg)

        if collapsemsg and not collapsef:
            raise util.Abort(
                _('message can only be specified with collapse'))

        if contf or abortf:
            # Resuming or aborting a previously interrupted rebase: the
            # revision-selection options are mutually exclusive with this.
            if contf and abortf:
                raise util.Abort(_('cannot use both abort and continue'))
            if collapsef:
                raise util.Abort(
                    _('cannot use collapse with continue or abort'))
            if srcf or basef or destf:
                raise util.Abort(
                    _('abort and continue do not allow specifying revisions'))
            if opts.get('tool', False):
                ui.warn(_('tool option will be ignored\n'))

            try:
                # Reload the persisted rebase state written by storestatus().
                (originalwd, target, state, skipped, collapsef, keepf,
                 keepbranchesf, external, activebookmark) = restorestatus(repo)
            except error.RepoLookupError:
                # State file references revisions that no longer exist.
                if abortf:
                    clearstatus(repo)
                    repo.ui.warn(_('rebase aborted (no revision is removed,'
                                   ' only broken state is cleared)\n'))
                    return 0
                else:
                    msg = _('cannot continue inconsistent rebase')
                    hint = _('use "hg rebase --abort" to clear broken state')
                    raise util.Abort(msg, hint=hint)
            if abortf:
                return abort(repo, originalwd, target, state)
        else:
            # Fresh rebase: --source/--base/--rev are mutually exclusive.
            if srcf and basef:
                raise util.Abort(_('cannot specify both a '
                                   'source and a base'))
            if revf and basef:
                raise util.Abort(_('cannot specify both a '
                                   'revision and a base'))
            if revf and srcf:
                raise util.Abort(_('cannot specify both a '
                                   'revision and a source'))

            cmdutil.checkunfinished(repo)
            cmdutil.bailifchanged(repo)

            if not destf:
                # Destination defaults to the latest revision in the
                # current branch
                branch = repo[None].branch()
                dest = repo[branch]
            else:
                dest = scmutil.revsingle(repo, destf)

            # Compute the set of revisions to rebase from whichever
            # selection option was given.
            if revf:
                rebaseset = scmutil.revrange(repo, revf)
                if not rebaseset:
                    ui.status(_('empty "rev" revision set - '
                                'nothing to rebase\n'))
                    return 1
            elif srcf:
                src = scmutil.revrange(repo, [srcf])
                if not src:
                    ui.status(_('empty "source" revision set - '
                                'nothing to rebase\n'))
                    return 1
                # Source plus all of its descendants.
                rebaseset = repo.revs('(%ld)::', src)
                assert rebaseset
            else:
                base = scmutil.revrange(repo, [basef or '.'])
                if not base:
                    ui.status(_('empty "base" revision set - '
                                "can't compute rebase set\n"))
                    return 1
                commonanc = repo.revs('ancestor(%ld, %d)', base, dest).first()
                if commonanc is not None:
                    rebaseset = repo.revs('(%d::(%ld) - %d)::',
                                          commonanc, base, commonanc)
                else:
                    rebaseset = []

            if not rebaseset:
                # transform to list because smartsets are not comparable to
                # lists. This should be improved to honor laziness of
                # smartset.
                if list(base) == [dest.rev()]:
                    if basef:
                        ui.status(_('nothing to rebase - %s is both "base"'
                                    ' and destination\n') % dest)
                    else:
                        ui.status(_('nothing to rebase - working directory '
                                    'parent is also destination\n'))
                elif not repo.revs('%ld - ::%d', base, dest):
                    if basef:
                        ui.status(_('nothing to rebase - "base" %s is '
                                    'already an ancestor of destination '
                                    '%s\n') %
                                  ('+'.join(str(repo[r]) for r in base),
                                   dest))
                    else:
                        ui.status(_('nothing to rebase - working '
                                    'directory parent is already an '
                                    'ancestor of destination %s\n') % dest)
                else: # can it happen?
                    ui.status(_('nothing to rebase from %s to %s\n') %
                              ('+'.join(str(repo[r]) for r in base), dest))
                return 1

            # Refuse to strip originals that still have descendants outside
            # the rebase set, unless --keep or unstable changesets are allowed.
            allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt)
            if (not (keepf or allowunstable)
                  and repo.revs('first(children(%ld) - %ld)',
                                rebaseset, rebaseset)):
                raise util.Abort(
                    _("can't remove original changesets with"
                      " unrebased descendants"),
                    hint=_('use --keep to keep original changesets'))

            result = buildstate(repo, dest, rebaseset, collapsef)
            if not result:
                # Empty state built, nothing to rebase
                ui.status(_('nothing to rebase\n'))
                return 1

            root = min(rebaseset)
            if not keepf and not repo[root].mutable():
                raise util.Abort(_("can't rebase immutable changeset %s")
                                 % repo[root],
                                 hint=_('see hg help phases for details'))

            originalwd, target, state = result
            if collapsef:
                targetancestors = repo.changelog.ancestors([target],
                                                           inclusive=True)
                external = externalparent(repo, state, targetancestors)

            if dest.closesbranch() and not keepbranchesf:
                ui.status(_('reopening closed branch head %s\n') % dest)

        if keepbranchesf:
            # insert _savebranch at the start of extrafns so if
            # there's a user-provided extrafn it can clobber branch if
            # desired
            extrafns.insert(0, _savebranch)
            if collapsef:
                branches = set()
                for rev in state:
                    branches.add(repo[rev].branch())
                if len(branches) > 1:
                    raise util.Abort(_('cannot collapse multiple named '
                                       'branches'))

        # Rebase
        if not targetancestors:
            targetancestors = repo.changelog.ancestors([target], inclusive=True)

        # Keep track of the current bookmarks in order to reset them later
        currentbookmarks = repo._bookmarks.copy()
        activebookmark = activebookmark or repo._bookmarkcurrent
        if activebookmark:
            bookmarks.unsetcurrent(repo)

        extrafn = _makeextrafn(extrafns)

        # Main loop: process every revision in ascending order, persisting
        # state before each merge so the run can be resumed after conflicts.
        sortedstate = sorted(state)
        total = len(sortedstate)
        pos = 0
        for rev in sortedstate:
            ctx = repo[rev]
            desc = '%d:%s "%s"' % (ctx.rev(), ctx,
                                   ctx.description().split('\n', 1)[0])
            names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node())
            if names:
                desc += ' (%s)' % ' '.join(names)
            pos += 1
            if state[rev] == revtodo:
                ui.status(_('rebasing %s\n') % desc)
                ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, ctx)),
                            _('changesets'), total)
                p1, p2, base = defineparents(repo, rev, target, state,
                                             targetancestors)
                storestatus(repo, originalwd, target, state, collapsef, keepf,
                            keepbranchesf, external, activebookmark)
                if len(repo.parents()) == 2:
                    # Two dirstate parents means a merge was left in the
                    # working directory by an interrupted run.
                    repo.ui.debug('resuming interrupted rebase\n')
                else:
                    try:
                        ui.setconfig('ui', 'forcemerge', opts.get('tool', ''),
                                     'rebase')
                        stats = rebasenode(repo, rev, p1, base, state,
                                           collapsef, target)
                        # stats[3] counts unresolved files from the merge.
                        if stats and stats[3] > 0:
                            raise error.InterventionRequired(
                                _('unresolved conflicts (see hg '
                                  'resolve, then hg rebase --continue)'))
                    finally:
                        ui.setconfig('ui', 'forcemerge', '', 'rebase')
                if not collapsef:
                    merging = p2 != nullrev
                    editform = cmdutil.mergeeditform(merging, 'rebase')
                    editor = cmdutil.getcommiteditor(editform=editform, **opts)
                    newnode = concludenode(repo, rev, p1, p2, extrafn=extrafn,
                                           editor=editor)
                else:
                    # Skip commit if we are collapsing
                    repo.dirstate.beginparentchange()
                    repo.setparents(repo[p1].node())
                    repo.dirstate.endparentchange()
                    newnode = None
                # Update the state
                if newnode is not None:
                    state[rev] = repo[newnode].rev()
                    ui.debug('rebased as %s\n' % short(newnode))
                else:
                    ui.warn(_('note: rebase of %d:%s created no changes '
                              'to commit\n') % (rev, ctx))
                    if not collapsef:
                        skipped.add(rev)
                        state[rev] = p1
                        ui.debug('next revision set to %s\n' % p1)
            elif state[rev] == nullmerge:
                ui.debug('ignoring null merge rebase of %s\n' % rev)
            elif state[rev] == revignored:
                ui.status(_('not rebasing ignored %s\n') % desc)
            else:
                ui.status(_('already rebased %s as %s\n') %
                          (desc, repo[state[rev]]))

        ui.progress(_('rebasing'), None)
        ui.note(_('rebase merging completed\n'))

        if collapsef and not keepopen:
            # --collapse: all revisions were merged into the working
            # directory above; commit them now as a single changeset.
            p1, p2, _base = defineparents(repo, min(state), target,
                                          state, targetancestors)
            editopt = opts.get('edit')
            editform = 'rebase.collapse'
            if collapsemsg:
                commitmsg = collapsemsg
            else:
                commitmsg = 'Collapsed revision'
                for rebased in state:
                    if rebased not in skipped and state[rebased] > nullmerge:
                        commitmsg += '\n* %s' % repo[rebased].description()
                editopt = True
            editor = cmdutil.getcommiteditor(edit=editopt, editform=editform)
            # NOTE(review): 'rev' here is the last revision iterated in the
            # loop above (its metadata is reused for the collapsed commit) --
            # confirm this is intended rather than min(state).
            newnode = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
                                   extrafn=extrafn, editor=editor)
            if newnode is None:
                newrev = target
            else:
                newrev = repo[newnode].rev()
            # All rebased revisions now map to the single collapsed commit.
            for oldrev in state.iterkeys():
                if state[oldrev] > nullmerge:
                    state[oldrev] = newrev

        if 'qtip' in repo.tags():
            updatemq(repo, state, skipped, **opts)

        if currentbookmarks:
            # Nodeids are needed to reset bookmarks
            nstate = {}
            for k, v in state.iteritems():
                if v > nullmerge:
                    nstate[repo[k].node()] = repo[v].node()
            # XXX this is the same as dest.node() for the non-continue path --
            # this should probably be cleaned up
            targetnode = repo[target].node()

        # restore original working directory
        # (we do this before stripping)
        newwd = state.get(originalwd, originalwd)
        if newwd < 0:
            # original directory is a parent of rebase set root or ignored
            newwd = originalwd
        if newwd not in [c.rev() for c in repo[None].parents()]:
            ui.note(_("update back to initial working directory parent\n"))
            hg.updaterepo(repo, newwd, False)

        if not keepf:
            # Strip (or obsolete) the original changesets.
            collapsedas = None
            if collapsef:
                collapsedas = newnode
            clearrebased(ui, repo, state, skipped, collapsedas)

        if currentbookmarks:
            updatebookmarks(repo, targetnode, nstate, currentbookmarks)
            if activebookmark not in repo._bookmarks:
                # active bookmark was divergent one and has been deleted
                activebookmark = None

        clearstatus(repo)
        ui.note(_("rebase completed\n"))
        util.unlinkpath(repo.sjoin('undo'), ignoremissing=True)
        if skipped:
            ui.note(_("%d revisions have been skipped\n") % len(skipped))

        if (activebookmark and
            repo['.'].node() == repo._bookmarks[activebookmark]):
                bookmarks.setcurrent(repo, activebookmark)

    finally:
        release(lock, wlock)
504
504
def externalparent(repo, state, targetancestors):
    """Return the revision that should be used as the second parent
    when the revisions in state is collapsed on top of targetancestors.
    Abort if there is more than one parent.
    """
    # The root of the rebase set never contributes an external parent.
    source = min(state)
    externals = set()
    for candidate in state:
        if candidate == source:
            continue
        # Collect parents that live outside both the rebase set and the
        # destination's ancestry -- those must become the second parent.
        externals.update(
            p.rev() for p in repo[candidate].parents()
            if p.rev() not in state and p.rev() not in targetancestors)
    if len(externals) == 1:
        return externals.pop()
    if not externals:
        return nullrev
    raise util.Abort(_('unable to collapse on top of %s, there is more '
                       'than one external parent: %s') %
                     (max(targetancestors),
                      ', '.join(str(p) for p in sorted(externals))))
527
527
def concludenode(repo, rev, p1, p2, commitmsg=None, editor=None, extrafn=None):
    '''Commit the wd changes with parents p1 and p2. Reuse commit info from rev
    but also store useful information in extra.
    Return node of committed revision.

    commitmsg defaults to rev's own description; extrafn, if given, may
    mutate the extra dict before the commit is made.'''
    try:
        # Point the dirstate at the rebase parents before committing.
        repo.dirstate.beginparentchange()
        repo.setparents(repo[p1].node(), repo[p2].node())
        repo.dirstate.endparentchange()
        ctx = repo[rev]
        if commitmsg is None:
            commitmsg = ctx.description()
        # Record provenance of the rebased changeset in extra.
        extra = {'rebase_source': ctx.hex()}
        if extrafn:
            extrafn(ctx, extra)

        # Temporarily force the phase of the new commit to at least draft
        # (never below the original's phase); restored afterwards.
        backup = repo.ui.backupconfig('phases', 'new-commit')
        try:
            targetphase = max(ctx.phase(), phases.draft)
            repo.ui.setconfig('phases', 'new-commit', targetphase, 'rebase')
            # Commit might fail if unresolved files exist
            newnode = repo.commit(text=commitmsg, user=ctx.user(),
                                  date=ctx.date(), extra=extra, editor=editor)
        finally:
            repo.ui.restoreconfig(backup)

        repo.dirstate.setbranch(repo[newnode].branch())
        return newnode
    except util.Abort:
        # Invalidate the previous setparents
        repo.dirstate.invalidate()
        raise
559
559
def rebasenode(repo, rev, p1, base, state, collapse, target):
    '''Rebase a single revision rev on top of p1 using base as merge ancestor.

    Returns the stats tuple from merge.update(); the caller inspects it
    for unresolved files.'''
    # Merge phase
    # Update to target and merge it with local
    if repo['.'].rev() != p1:
        repo.ui.debug(" update to %d:%s\n" % (p1, repo[p1]))
        merge.update(repo, p1, False, True, False)
    else:
        repo.ui.debug(" already in target\n")
    # Flush dirstate before the merge so its parents are on disk.
    repo.dirstate.write()
    repo.ui.debug(" merge against %d:%s\n" % (rev, repo[rev]))
    if base is not None:
        repo.ui.debug(" detach base %d:%s\n" % (base, repo[base]))
    # When collapsing in-place, the parent is the common ancestor, we
    # have to allow merging with it.
    stats = merge.update(repo, rev, True, True, False, base, collapse,
                        labels=['dest', 'source'])
    if collapse:
        copies.duplicatecopies(repo, rev, target)
    else:
        # If we're not using --collapse, we need to
        # duplicate copies between the revision we're
        # rebasing and its first parent, but *not*
        # duplicate any copies that have already been
        # performed in the destination.
        p1rev = repo[rev].p1().rev()
        copies.duplicatecopies(repo, rev, p1rev, skiprev=target)
    return stats
588
588
def nearestrebased(repo, rev, state):
    """return the nearest ancestors of rev in the rebase result"""
    # revisions that were actually rebased (not pruned/ignored markers)
    rebased = [r for r in state if state[r] > nullmerge]
    candidates = repo.revs('max(%ld and (::%d))', rebased, rev)
    if not candidates:
        return None
    return state[candidates.first()]
597
597
def defineparents(repo, rev, target, state, targetancestors):
    '''Return the new parent relationship of the revision that will be rebased.

    Returns a (p1, p2, base) tuple of revision numbers: the two new parents
    and the merge base to use (None lets the merge machinery pick one).
    '''
    parents = repo[rev].parents()
    p1 = p2 = nullrev

    p1n = parents[0].rev()
    if p1n in targetancestors:
        p1 = target
    elif p1n in state:
        if state[p1n] == nullmerge:
            p1 = target
        elif state[p1n] == revignored:
            p1 = nearestrebased(repo, p1n, state)
            if p1 is None:
                p1 = target
        else:
            p1 = state[p1n]
    else: # p1n external
        p1 = target
        p2 = p1n

    if len(parents) == 2 and parents[1].rev() not in targetancestors:
        p2n = parents[1].rev()
        # interesting second parent
        if p2n in state:
            if p1 == target: # p1n in targetancestors or external
                p1 = state[p2n]
            elif state[p2n] == revignored:
                p2 = nearestrebased(repo, p2n, state)
                if p2 is None:
                    # no ancestors rebased yet, detach
                    p2 = target
            else:
                p2 = state[p2n]
        else: # p2n external
            if p2 != nullrev: # p1n external too => rev is a merged revision
                raise util.Abort(_('cannot use revision %d as base, result '
                                   'would have 3 parents') % rev)
            p2 = p2n
    repo.ui.debug(" future parents are %d and %d\n" %
                  (repo[p1].rev(), repo[p2].rev()))

    if rev == min(state):
        # Case (1) initial changeset of a non-detaching rebase.
        # Let the merge mechanism find the base itself.
        base = None
    elif not repo[rev].p2():
        # Case (2) detaching the node with a single parent, use this parent
        base = repo[rev].p1().rev()
    else:
        # Assuming there is a p1, this is the case where there also is a p2.
        # We are thus rebasing a merge and need to pick the right merge base.
        #
        # Imagine we have:
        # - M: current rebase revision in this step
        # - A: one parent of M
        # - B: other parent of M
        # - D: destination of this merge step (p1 var)
        #
        # Consider the case where D is a descendant of A or B and the other is
        # 'outside'. In this case, the right merge base is the D ancestor.
        #
        # An informal proof, assuming A is 'outside' and B is the D ancestor:
        #
        # If we pick B as the base, the merge involves:
        # - changes from B to M (actual changeset payload)
        # - changes from B to D (induced by rebase) as D is a rebased
        #   version of B)
        # Which exactly represent the rebase operation.
        #
        # If we pick A as the base, the merge involves:
        # - changes from A to M (actual changeset payload)
        # - changes from A to D (with include changes between unrelated A and B
        #   plus changes induced by rebase)
        # Which does not represent anything sensible and creates a lot of
        # conflicts. A is thus not the right choice - B is.
        #
        # Note: The base found in this 'proof' is only correct in the specified
        # case. This base does not make sense if is not D a descendant of A or B
        # or if the other is not parent 'outside' (especially not if the other
        # parent has been rebased). The current implementation does not
        # make it feasible to consider different cases separately. In these
        # other cases we currently just leave it to the user to correctly
        # resolve an impossible merge using a wrong ancestor.
        for p in repo[rev].parents():
            if state.get(p.rev()) == p1:
                base = p.rev()
                break
        else: # fallback when base not found
            base = None

            # Raise because this function is called wrong (see issue 4106)
            raise AssertionError('no base found to rebase on '
                                 '(defineparents called wrong)')
    return p1, p2, base
693
693
def isagitpatch(repo, patchname):
    '''Return true if the given mq patch is in git extended diff format.

    Scans the patch file under the repository's mq directory for a
    'diff --git' header line.
    '''
    mqpatch = os.path.join(repo.mq.path, patchname)
    # Open explicitly and close in a finally block: the original relied on
    # CPython refcounting to close the handle (including on early return),
    # which leaks the descriptor on other interpreters.
    fp = open(mqpatch, 'rb')
    try:
        for line in patch.linereader(fp):
            if line.startswith('diff --git'):
                return True
        return False
    finally:
        fp.close()
701
701
def updatemq(repo, state, skipped, **opts):
    '''Update rebased mq patches - finalize and then import them.

    state maps old revs to their rebased revs; skipped is the set of revs
    that were dropped during the rebase.
    '''
    mqrebase = {}
    mq = repo.mq
    original_series = mq.fullseries[:]
    skippedpatches = set()

    for p in mq.applied:
        rev = repo[p.node].rev()
        if rev in state:
            repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
                          (rev, p.name))
            # remember the patch name and whether it was in git format
            mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
        else:
            # Applied but not rebased, not sure this should happen
            skippedpatches.add(p.name)

    if mqrebase:
        mq.finish(repo, mqrebase.keys())

        # We must start import from the newest revision
        for rev in sorted(mqrebase, reverse=True):
            if rev not in skipped:
                name, isgit = mqrebase[rev]
                repo.ui.note(_('updating mq patch %s to %s:%s\n') %
                             (name, state[rev], repo[state[rev]]))
                mq.qimport(repo, (), patchname=name, git=isgit,
                           rev=[str(state[rev])])
            else:
                # Rebased and skipped
                skippedpatches.add(mqrebase[rev][0])

        # Patches were either applied and rebased and imported in
        # order, applied and removed or unapplied. Discard the removed
        # ones while preserving the original series order and guards.
        newseries = [s for s in original_series
                     if mq.guard_re.split(s, 1)[0] not in skippedpatches]
        mq.fullseries[:] = newseries
        mq.seriesdirty = True
        mq.savedirty()
742
742
def updatebookmarks(repo, targetnode, nstate, originalbookmarks):
    'Move bookmarks to their correct changesets, and delete divergent ones'
    marks = repo._bookmarks
    for name, oldnode in originalbookmarks.iteritems():
        if oldnode not in nstate:
            continue
        # update the bookmarks for revs that have moved
        marks[name] = nstate[oldnode]
        bookmarks.deletedivergent(repo, [targetnode], name)

    marks.write()
753
753
def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
                external, activebookmark):
    '''Store the current status to allow recovery.

    Writes .hg/rebasestate: three node hashes (original working dir, target,
    external parent), three flags, the active bookmark name (may be empty),
    then one oldrev:newrev line per entry in state.
    '''
    # use repo.vfs, not the deprecated repo.opener alias
    f = repo.vfs("rebasestate", "w")
    f.write(repo[originalwd].hex() + '\n')
    f.write(repo[target].hex() + '\n')
    f.write(repo[external].hex() + '\n')
    f.write('%d\n' % int(collapse))
    f.write('%d\n' % int(keep))
    f.write('%d\n' % int(keepbranches))
    f.write('%s\n' % (activebookmark or ''))
    for d, v in state.iteritems():
        oldrev = repo[d].hex()
        if v >= 0:
            newrev = repo[v].hex()
        elif v == revtodo:
            # To maintain format compatibility, we have to use nullid.
            # Please do remove this special case when upgrading the format.
            newrev = hex(nullid)
        else:
            # nullmerge/revignored markers are stored as their int value
            newrev = v
        f.write("%s:%s\n" % (oldrev, newrev))
    f.close()
    repo.ui.debug('rebase status stored\n')
778
778
def clearstatus(repo):
    'Remove the status files'
    # ignoremissing: clearing when no rebase state was stored is not an error
    util.unlinkpath(repo.join("rebasestate"), ignoremissing=True)
782
782
783 def restorestatus(repo):
783 def restorestatus(repo):
784 'Restore a previously stored status'
784 'Restore a previously stored status'
785 try:
785 try:
786 keepbranches = None
786 keepbranches = None
787 target = None
787 target = None
788 collapse = False
788 collapse = False
789 external = nullrev
789 external = nullrev
790 activebookmark = None
790 activebookmark = None
791 state = {}
791 state = {}
792 f = repo.opener("rebasestate")
792 f = repo.vfs("rebasestate")
793 for i, l in enumerate(f.read().splitlines()):
793 for i, l in enumerate(f.read().splitlines()):
794 if i == 0:
794 if i == 0:
795 originalwd = repo[l].rev()
795 originalwd = repo[l].rev()
796 elif i == 1:
796 elif i == 1:
797 target = repo[l].rev()
797 target = repo[l].rev()
798 elif i == 2:
798 elif i == 2:
799 external = repo[l].rev()
799 external = repo[l].rev()
800 elif i == 3:
800 elif i == 3:
801 collapse = bool(int(l))
801 collapse = bool(int(l))
802 elif i == 4:
802 elif i == 4:
803 keep = bool(int(l))
803 keep = bool(int(l))
804 elif i == 5:
804 elif i == 5:
805 keepbranches = bool(int(l))
805 keepbranches = bool(int(l))
806 elif i == 6 and not (len(l) == 81 and ':' in l):
806 elif i == 6 and not (len(l) == 81 and ':' in l):
807 # line 6 is a recent addition, so for backwards compatibility
807 # line 6 is a recent addition, so for backwards compatibility
808 # check that the line doesn't look like the oldrev:newrev lines
808 # check that the line doesn't look like the oldrev:newrev lines
809 activebookmark = l
809 activebookmark = l
810 else:
810 else:
811 oldrev, newrev = l.split(':')
811 oldrev, newrev = l.split(':')
812 if newrev in (str(nullmerge), str(revignored)):
812 if newrev in (str(nullmerge), str(revignored)):
813 state[repo[oldrev].rev()] = int(newrev)
813 state[repo[oldrev].rev()] = int(newrev)
814 elif newrev == nullid:
814 elif newrev == nullid:
815 state[repo[oldrev].rev()] = revtodo
815 state[repo[oldrev].rev()] = revtodo
816 # Legacy compat special case
816 # Legacy compat special case
817 else:
817 else:
818 state[repo[oldrev].rev()] = repo[newrev].rev()
818 state[repo[oldrev].rev()] = repo[newrev].rev()
819
819
820 if keepbranches is None:
820 if keepbranches is None:
821 raise util.Abort(_('.hg/rebasestate is incomplete'))
821 raise util.Abort(_('.hg/rebasestate is incomplete'))
822
822
823 skipped = set()
823 skipped = set()
824 # recompute the set of skipped revs
824 # recompute the set of skipped revs
825 if not collapse:
825 if not collapse:
826 seen = set([target])
826 seen = set([target])
827 for old, new in sorted(state.items()):
827 for old, new in sorted(state.items()):
828 if new != revtodo and new in seen:
828 if new != revtodo and new in seen:
829 skipped.add(old)
829 skipped.add(old)
830 seen.add(new)
830 seen.add(new)
831 repo.ui.debug('computed skipped revs: %s\n' %
831 repo.ui.debug('computed skipped revs: %s\n' %
832 (' '.join(str(r) for r in sorted(skipped)) or None))
832 (' '.join(str(r) for r in sorted(skipped)) or None))
833 repo.ui.debug('rebase status resumed\n')
833 repo.ui.debug('rebase status resumed\n')
834 return (originalwd, target, state, skipped,
834 return (originalwd, target, state, skipped,
835 collapse, keep, keepbranches, external, activebookmark)
835 collapse, keep, keepbranches, external, activebookmark)
836 except IOError, err:
836 except IOError, err:
837 if err.errno != errno.ENOENT:
837 if err.errno != errno.ENOENT:
838 raise
838 raise
839 raise util.Abort(_('no rebase in progress'))
839 raise util.Abort(_('no rebase in progress'))
840
840
def inrebase(repo, originalwd, state):
    '''check whether the working dir is in an interrupted rebase'''
    parentrevs = set(p.rev() for p in repo.parents())
    if originalwd in parentrevs:
        return True
    # otherwise, any already-rebased revision checked out counts too
    return any(new in parentrevs for new in state.itervalues())
852
852
def abort(repo, originalwd, target, state):
    '''Restore the repository to its original state.

    Updates back to originalwd and strips rebased changesets when it is safe
    to do so; always clears the stored rebase state. Returns 0.
    '''
    dstates = [s for s in state.values() if s >= 0]
    immutable = [d for d in dstates if not repo[d].mutable()]
    cleanup = True
    if immutable:
        # public/rebased-onto-public changesets cannot be stripped
        repo.ui.warn(_("warning: can't clean up immutable changesets %s\n")
                     % ', '.join(str(repo[r]) for r in immutable),
                     hint=_('see hg help phases for details'))
        cleanup = False

    descendants = set()
    if dstates:
        descendants = set(repo.changelog.descendants(dstates))
    if descendants - set(dstates):
        # someone committed on top of the rebased csets; stripping would
        # destroy their work
        repo.ui.warn(_("warning: new changesets detected on target branch, "
                       "can't strip\n"))
        cleanup = False

    if cleanup:
        # Update away from the rebase if necessary
        if inrebase(repo, originalwd, state):
            merge.update(repo, originalwd, False, True, False)

        # Strip from the first rebased revision
        rebased = filter(lambda x: x >= 0 and x != target, state.values())
        if rebased:
            strippoints = [c.node() for c in repo.set('roots(%ld)', rebased)]
            # no backup of rebased cset versions needed
            repair.strip(repo.ui, repo, strippoints)

    clearstatus(repo)
    repo.ui.warn(_('rebase aborted\n'))
    return 0
887
887
def buildstate(repo, dest, rebaseset, collapse):
    '''Define which revisions are going to be rebased and where

    repo: repo
    dest: context
    rebaseset: set of rev

    Returns (current rev, dest rev, state dict) or None when there is
    nothing to do. state maps each involved rev to revtodo, nullmerge or
    revignored.
    '''

    # This check isn't strictly necessary, since mq detects commits over an
    # applied patch. But it prevents messing up the working directory when
    # a partially completed rebase is blocked by mq.
    if 'qtip' in repo.tags() and (dest.node() in
                                  [s.node for s in repo.mq.applied]):
        raise util.Abort(_('cannot rebase onto an applied mq patch'))

    roots = list(repo.set('roots(%ld)', rebaseset))
    if not roots:
        raise util.Abort(_('no matching revisions'))
    roots.sort()
    state = {}
    detachset = set()
    for root in roots:
        commonbase = root.ancestor(dest)
        if commonbase == root:
            raise util.Abort(_('source is ancestor of destination'))
        if commonbase == dest:
            samebranch = root.branch() == dest.branch()
            if not collapse and samebranch and root in dest.children():
                # rebasing would be a no-op for this root
                repo.ui.debug('source is a child of destination\n')
                return None

        repo.ui.debug('rebase onto %d starting from %s\n' % (dest, root))
        state.update(dict.fromkeys(rebaseset, revtodo))
        # Rebase tries to turn <dest> into a parent of <root> while
        # preserving the number of parents of rebased changesets:
        #
        # - A changeset with a single parent will always be rebased as a
        #   changeset with a single parent.
        #
        # - A merge will be rebased as merge unless its parents are both
        #   ancestors of <dest> or are themselves in the rebased set and
        #   pruned while rebased.
        #
        # If one parent of <root> is an ancestor of <dest>, the rebased
        # version of this parent will be <dest>. This is always true with
        # --base option.
        #
        # Otherwise, we need to *replace* the original parents with
        # <dest>. This "detaches" the rebased set from its former location
        # and rebases it onto <dest>. Changes introduced by ancestors of
        # <root> not common with <dest> (the detachset, marked as
        # nullmerge) are "removed" from the rebased changesets.
        #
        # - If <root> has a single parent, set it to <dest>.
        #
        # - If <root> is a merge, we cannot decide which parent to
        #   replace, the rebase operation is not clearly defined.
        #
        # The table below sums up this behavior:
        #
        # +------------------+----------------------+-------------------------+
        # |                  |     one parent       |  merge                  |
        # +------------------+----------------------+-------------------------+
        # | parent in        | new parent is <dest> | parents in ::<dest> are |
        # | ::<dest>         |                      | remapped to <dest>      |
        # +------------------+----------------------+-------------------------+
        # | unrelated source | new parent is <dest> | ambiguous, abort        |
        # +------------------+----------------------+-------------------------+
        #
        # The actual abort is handled by `defineparents`
        if len(root.parents()) <= 1:
            # ancestors of <root> not ancestors of <dest>
            detachset.update(repo.changelog.findmissingrevs([commonbase.rev()],
                                                            [root.rev()]))
    for r in detachset:
        if r not in state:
            state[r] = nullmerge
    if len(roots) > 1:
        # If we have multiple roots, we may have "hole" in the rebase set.
        # Rebase roots that descend from those "hole" should not be detached as
        # other root are. We use the special `revignored` to inform rebase that
        # the revision should be ignored but that `defineparents` should search
        # a rebase destination that make sense regarding rebased topology.
        rebasedomain = set(repo.revs('%ld::%ld', rebaseset, rebaseset))
        for ignored in set(rebasedomain) - set(rebaseset):
            state[ignored] = revignored
    return repo['.'].rev(), dest.rev(), state
975
975
def clearrebased(ui, repo, state, skipped, collapsedas=None):
    """dispose of rebased revision at the end of the rebase

    If `collapsedas` is not None, the rebase was a collapse whose result if the
    `collapsedas` node.

    With obsolescence markers enabled, old changesets are marked obsolete
    with their rebased successors; otherwise they are stripped.
    """
    if obsolete.isenabled(repo, obsolete.createmarkersopt):
        markers = []
        for rev, newrev in sorted(state.items()):
            if newrev >= 0:
                if rev in skipped:
                    # pruned: no successor
                    succs = ()
                elif collapsedas is not None:
                    succs = (repo[collapsedas],)
                else:
                    succs = (repo[newrev],)
                markers.append((repo[rev], succs))
        if markers:
            obsolete.createmarkers(repo, markers)
    else:
        rebased = [rev for rev in state if state[rev] > nullmerge]
        if rebased:
            stripped = []
            for root in repo.set('roots(%ld)', rebased):
                if set(repo.changelog.descendants([root.rev()])) - set(state):
                    ui.warn(_("warning: new changesets detected "
                              "on source branch, not stripping\n"))
                else:
                    stripped.append(root.node())
            if stripped:
                # backup the old csets by default
                repair.strip(ui, repo, stripped, "all")
1007
1007
1008
1008
def pullrebase(orig, ui, repo, *args, **opts):
    """Call rebase after pull if the latter has been invoked with --rebase.

    Wraps the 'pull' command (installed via extensions.wrapcommand in
    uisetup). 'orig' is the original pull implementation. Without
    --rebase this simply delegates to 'orig' (after rejecting --tool,
    which only makes sense together with --rebase).
    """
    if opts.get('rebase'):
        if opts.get('update'):
            # pull --update would move the working directory before we
            # get a chance to rebase; drop it and warn in debug output
            del opts['update']
            ui.debug('--update and --rebase are not compatible, ignoring '
                     'the update flag\n')

        # remember where the active bookmark was so it can be moved later
        movemarkfrom = repo['.'].node()
        revsprepull = len(repo)
        # temporarily disable postincoming so pull does not update/report;
        # restored in the finally block below
        origpostincoming = commands.postincoming
        def _dummy(*args, **kwargs):
            pass
        commands.postincoming = _dummy
        try:
            orig(ui, repo, *args, **opts)
        finally:
            commands.postincoming = origpostincoming
        revspostpull = len(repo)
        if revspostpull > revsprepull:
            # something was actually pulled in
            # --rev option from pull conflict with rebase own --rev
            # dropping it
            if 'rev' in opts:
                del opts['rev']
            rebase(ui, repo, **opts)
            branch = repo[None].branch()
            dest = repo[branch].rev()
            if dest != repo['.'].rev():
                # there was nothing to rebase we force an update
                hg.update(repo, dest)
            if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
                ui.status(_("updating bookmark %s\n")
                          % repo._bookmarkcurrent)
    else:
        if opts.get('tool'):
            raise util.Abort(_('--tool can only be used with --rebase'))
        orig(ui, repo, *args, **opts)
1046
1046
def summaryhook(ui, repo):
    """Add a 'rebase:' line to 'hg summary' while a rebase is in progress.

    Registered via cmdutil.summaryhooks in uisetup. Does nothing when no
    .hg/rebasestate file exists.
    """
    if not os.path.exists(repo.join('rebasestate')):
        return
    try:
        state = restorestatus(repo)[2]
    except error.RepoLookupError:
        # state file pointing to unknown commit: broken/stale state
        # i18n: column positioning for "hg summary"
        msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
        ui.write(msg)
        return
    # revisions mapped to >= 0 have already been rebased; negative values
    # mark revisions still pending
    numrebased = len([i for i in state.itervalues() if i >= 0])
    # i18n: column positioning for "hg summary"
    ui.write(_('rebase: %s, %s (rebase --continue)\n') %
             (ui.label(_('%d rebased'), 'rebase.rebased') % numrebased,
              ui.label(_('%d remaining'), 'rebase.remaining') %
              (len(state) - numrebased)))
1063
1063
def uisetup(ui):
    'Replace pull with a decorator to provide --rebase option'
    # wrap 'pull' so pullrebase runs instead, and extend its option table
    entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
    entry[1].append(('', 'rebase', None,
                     _("rebase working directory to branch head")))
    entry[1].append(('t', 'tool', '',
                     _("specify merge tool for rebase")))
    # show rebase progress in 'hg summary'
    cmdutil.summaryhooks.add('rebase', summaryhook)
    # teach 'hg status -v' / checkunfinished about in-progress rebases
    cmdutil.unfinishedstates.append(
        ['rebasestate', False, False, _('rebase in progress'),
         _("use 'hg rebase --continue' or 'hg rebase --abort'")])
@@ -1,721 +1,721 b''
1 # shelve.py - save/restore working directory state
1 # shelve.py - save/restore working directory state
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 """save and restore changes to the working directory
8 """save and restore changes to the working directory
9
9
10 The "hg shelve" command saves changes made to the working directory
10 The "hg shelve" command saves changes made to the working directory
11 and reverts those changes, resetting the working directory to a clean
11 and reverts those changes, resetting the working directory to a clean
12 state.
12 state.
13
13
14 Later on, the "hg unshelve" command restores the changes saved by "hg
14 Later on, the "hg unshelve" command restores the changes saved by "hg
15 shelve". Changes can be restored even after updating to a different
15 shelve". Changes can be restored even after updating to a different
16 parent, in which case Mercurial's merge machinery will resolve any
16 parent, in which case Mercurial's merge machinery will resolve any
17 conflicts if necessary.
17 conflicts if necessary.
18
18
19 You can have more than one shelved change outstanding at a time; each
19 You can have more than one shelved change outstanding at a time; each
20 shelved change has a distinct name. For details, see the help for "hg
20 shelved change has a distinct name. For details, see the help for "hg
21 shelve".
21 shelve".
22 """
22 """
23
23
24 from mercurial.i18n import _
24 from mercurial.i18n import _
25 from mercurial.node import nullid, nullrev, bin, hex
25 from mercurial.node import nullid, nullrev, bin, hex
26 from mercurial import changegroup, cmdutil, scmutil, phases, commands
26 from mercurial import changegroup, cmdutil, scmutil, phases, commands
27 from mercurial import error, hg, mdiff, merge, patch, repair, util
27 from mercurial import error, hg, mdiff, merge, patch, repair, util
28 from mercurial import templatefilters, exchange, bundlerepo
28 from mercurial import templatefilters, exchange, bundlerepo
29 from mercurial import lock as lockmod
29 from mercurial import lock as lockmod
30 from hgext import rebase
30 from hgext import rebase
31 import errno
31 import errno
32
32
# command registration table for this extension; filled in by the
# @command decorator created below
cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
36
36
class shelvedfile(object):
    """Helper for the file storing a single shelve

    Handles common functions on shelve files (.hg/.patch) using
    the vfs layer"""
    def __init__(self, repo, name, filetype=None):
        self.repo = repo
        self.name = name
        # all shelve files live under .hg/shelved
        self.vfs = scmutil.vfs(repo.join('shelved'))
        if filetype:
            self.fname = name + '.' + filetype
        else:
            self.fname = name

    def exists(self):
        # True if the backing file exists in .hg/shelved
        return self.vfs.exists(self.fname)

    def filename(self):
        # full path of the backing file
        return self.vfs.join(self.fname)

    def unlink(self):
        util.unlink(self.filename())

    def stat(self):
        return self.vfs.stat(self.fname)

    def opener(self, mode='rb'):
        # open the backing file; a missing file is reported as a user
        # error instead of a raw IOError
        try:
            return self.vfs(self.fname, mode)
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
            raise util.Abort(_("shelved change '%s' not found") % self.name)

    def applybundle(self):
        # apply the shelve bundle to the repo, targeting the secret phase
        fp = self.opener()
        try:
            gen = exchange.readbundle(self.repo.ui, fp, self.fname, self.vfs)
            changegroup.addchangegroup(self.repo, gen, 'unshelve',
                                       'bundle:' + self.vfs.join(self.fname),
                                       targetphase=phases.secret)
        finally:
            fp.close()

    def bundlerepo(self):
        # view the shelve bundle as an overlay bundle repository
        return bundlerepo.bundlerepository(self.repo.baseui, self.repo.root,
                                           self.vfs.join(self.fname))
    def writebundle(self, cg):
        # write changegroup 'cg' as an uncompressed HG10 bundle
        changegroup.writebundle(cg, self.fname, 'HG10UN', self.vfs)
86
86
class shelvedstate(object):
    """Handle persistence during unshelving operations.

    Handles saving and restoring a shelved state. Ensures that different
    versions of a shelved state are possible and handles them appropriately.
    """
    # bump _version whenever the positional on-disk format written by
    # save() changes
    _version = 1
    _filename = 'shelvedstate'

    @classmethod
    def load(cls, repo):
        """Read .hg/shelvedstate and return a populated instance.

        Aborts if the file was written by an incompatible version of
        the extension. The file format is one field per line, in the
        exact order written by save()."""
        fp = repo.vfs(cls._filename)
        try:
            version = int(fp.readline().strip())

            if version != cls._version:
                raise util.Abort(_('this version of shelve is incompatible '
                                   'with the version used in this repo'))
            name = fp.readline().strip()
            wctx = fp.readline().strip()
            pendingctx = fp.readline().strip()
            parents = [bin(h) for h in fp.readline().split()]
            stripnodes = [bin(h) for h in fp.readline().split()]
        finally:
            fp.close()

        obj = cls()
        obj.name = name
        obj.wctx = repo[bin(wctx)]
        obj.pendingctx = repo[bin(pendingctx)]
        obj.parents = parents
        obj.stripnodes = stripnodes

        return obj

    @classmethod
    def save(cls, repo, name, originalwctx, pendingctx, stripnodes):
        """Write the unshelve-in-progress state, one field per line.

        Field order must match what load() reads back."""
        fp = repo.vfs(cls._filename, 'wb')
        fp.write('%i\n' % cls._version)
        fp.write('%s\n' % name)
        fp.write('%s\n' % hex(originalwctx.node()))
        fp.write('%s\n' % hex(pendingctx.node()))
        fp.write('%s\n' % ' '.join([hex(p) for p in repo.dirstate.parents()]))
        fp.write('%s\n' % ' '.join([hex(n) for n in stripnodes]))
        fp.close()

    @classmethod
    def clear(cls, repo):
        # remove the state file; missing file is not an error
        util.unlinkpath(repo.join(cls._filename), ignoremissing=True)
136
136
def createcmd(ui, repo, pats, opts):
    """subcommand that creates a new shelve

    Commits the working-directory changes as a temporary secret commit,
    writes that commit out as a bundle plus a patch under .hg/shelved,
    then rolls the repository back (transaction abort) and updates the
    working directory back to the parent.
    """

    def publicancestors(ctx):
        """Compute the public ancestors of a commit.

        Much faster than the revset ancestors(ctx) & draft()"""
        seen = set([nullrev])
        visit = util.deque()
        visit.append(ctx)
        while visit:
            ctx = visit.popleft()
            yield ctx.node()
            for parent in ctx.parents():
                rev = parent.rev()
                if rev not in seen:
                    seen.add(rev)
                    if parent.mutable():
                        visit.append(parent)

    wctx = repo[None]
    parents = wctx.parents()
    if len(parents) > 1:
        raise util.Abort(_('cannot shelve while merging'))
    parent = parents[0]

    # we never need the user, so we use a generic user for all shelve operations
    user = 'shelve@localhost'
    label = repo._bookmarkcurrent or parent.branch() or 'default'

    # slashes aren't allowed in filenames, therefore we rename it
    label = label.replace('/', '_')

    def gennames():
        # candidate shelve names: the label itself, then label-01 .. label-99
        yield label
        for i in xrange(1, 100):
            yield '%s-%02d' % (label, i)

    def commitfunc(ui, repo, message, match, opts):
        # commit callback used by cmdutil.commit below; forces the new
        # commit into the secret phase and temporarily disables mq's
        # "has applied patches" check if mq is loaded
        hasmq = util.safehasattr(repo, 'mq')
        if hasmq:
            saved, repo.mq.checkapplied = repo.mq.checkapplied, False
        backup = repo.ui.backupconfig('phases', 'new-commit')
        try:
            repo.ui. setconfig('phases', 'new-commit', phases.secret)
            editor = cmdutil.getcommiteditor(editform='shelve.shelve', **opts)
            return repo.commit(message, user, opts.get('date'), match,
                               editor=editor)
        finally:
            repo.ui.restoreconfig(backup)
            if hasmq:
                repo.mq.checkapplied = saved

    if parent.node() != nullid:
        desc = "changes to '%s'" % parent.description().split('\n', 1)[0]
    else:
        desc = '(changes in empty repository)'

    if not opts['message']:
        opts['message'] = desc

    name = opts['name']

    wlock = lock = tr = bms = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()

        # bookmarks are restored verbatim in the finally block: the
        # temporary commit must not move them
        bms = repo._bookmarks.copy()
        # use an uncommitted transaction to generate the bundle to avoid
        # pull races. ensure we don't print the abort message to stderr.
        tr = repo.transaction('commit', report=lambda x: None)

        if name:
            if shelvedfile(repo, name, 'hg').exists():
                raise util.Abort(_("a shelved change named '%s' already exists")
                                 % name)
        else:
            # pick the first free auto-generated name
            for n in gennames():
                if not shelvedfile(repo, n, 'hg').exists():
                    name = n
                    break
            else:
                raise util.Abort(_("too many shelved changes named '%s'") %
                                 label)

        # ensure we are not creating a subdirectory or a hidden file
        if '/' in name or '\\' in name:
            raise util.Abort(_('shelved change names may not contain slashes'))
        if name.startswith('.'):
            raise util.Abort(_("shelved change names may not start with '.'"))

        node = cmdutil.commit(ui, repo, commitfunc, pats, opts)

        if not node:
            # nothing was committed: report why and exit with status 1
            stat = repo.status(match=scmutil.match(repo[None], pats, opts))
            if stat.deleted:
                ui.status(_("nothing changed (%d missing files, see "
                            "'hg status')\n") % len(stat.deleted))
            else:
                ui.status(_("nothing changed\n"))
            return 1

        # bundle everything from the public boundary up to the new commit
        bases = list(publicancestors(repo[node]))
        cg = changegroup.changegroupsubset(repo, bases, [node], 'shelve')
        shelvedfile(repo, name, 'hg').writebundle(cg)
        # also export a human-readable patch next to the bundle
        cmdutil.export(repo, [node],
                       fp=shelvedfile(repo, name, 'patch').opener('wb'),
                       opts=mdiff.diffopts(git=True))


        if ui.formatted():
            desc = util.ellipsis(desc, ui.termwidth())
        ui.status(_('shelved as %s\n') % name)
        hg.update(repo, parent.node())
    finally:
        if bms:
            # restore old bookmarks
            repo._bookmarks.update(bms)
            repo._bookmarks.write()
        if tr:
            # aborting the transaction discards the temporary commit
            tr.abort()
        lockmod.release(lock, wlock)
260
260
def cleanupcmd(ui, repo):
    """Delete every shelved change stored under .hg/shelved."""
    wlock = None
    try:
        wlock = repo.wlock()
        for entry, _kind in repo.vfs.readdir('shelved'):
            # only touch files this extension created (.hg bundles
            # and their companion .patch files)
            if entry.rsplit('.', 1)[-1] in ('hg', 'patch'):
                shelvedfile(repo, entry).unlink()
    finally:
        lockmod.release(wlock)
273
273
274 def deletecmd(ui, repo, pats):
274 def deletecmd(ui, repo, pats):
275 """subcommand that deletes a specific shelve"""
275 """subcommand that deletes a specific shelve"""
276 if not pats:
276 if not pats:
277 raise util.Abort(_('no shelved changes specified!'))
277 raise util.Abort(_('no shelved changes specified!'))
278 wlock = None
278 wlock = None
279 try:
279 try:
280 wlock = repo.wlock()
280 wlock = repo.wlock()
281 try:
281 try:
282 for name in pats:
282 for name in pats:
283 for suffix in 'hg patch'.split():
283 for suffix in 'hg patch'.split():
284 shelvedfile(repo, name, suffix).unlink()
284 shelvedfile(repo, name, suffix).unlink()
285 except OSError, err:
285 except OSError, err:
286 if err.errno != errno.ENOENT:
286 if err.errno != errno.ENOENT:
287 raise
287 raise
288 raise util.Abort(_("shelved change '%s' not found") % name)
288 raise util.Abort(_("shelved change '%s' not found") % name)
289 finally:
289 finally:
290 lockmod.release(wlock)
290 lockmod.release(wlock)
291
291
292 def listshelves(repo):
292 def listshelves(repo):
293 """return all shelves in repo as list of (time, filename)"""
293 """return all shelves in repo as list of (time, filename)"""
294 try:
294 try:
295 names = repo.vfs.readdir('shelved')
295 names = repo.vfs.readdir('shelved')
296 except OSError, err:
296 except OSError, err:
297 if err.errno != errno.ENOENT:
297 if err.errno != errno.ENOENT:
298 raise
298 raise
299 return []
299 return []
300 info = []
300 info = []
301 for (name, _type) in names:
301 for (name, _type) in names:
302 pfx, sfx = name.rsplit('.', 1)
302 pfx, sfx = name.rsplit('.', 1)
303 if not pfx or sfx != 'patch':
303 if not pfx or sfx != 'patch':
304 continue
304 continue
305 st = shelvedfile(repo, name).stat()
305 st = shelvedfile(repo, name).stat()
306 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
306 info.append((st.st_mtime, shelvedfile(repo, pfx).filename()))
307 return sorted(info, reverse=True)
307 return sorted(info, reverse=True)
308
308
def listcmd(ui, repo, pats, opts):
    """subcommand that displays the list of shelves

    One line per shelve: name, age, and the first description line taken
    from the stored patch file; optionally followed by the patch itself
    (--patch) and/or a diffstat (--stat).
    """
    pats = set(pats)
    width = 80
    if not ui.plain():
        width = ui.termwidth()
    # the first (newest) entry gets a distinct label, the rest share one
    namelabel = 'shelve.newest'
    for mtime, name in listshelves(repo):
        sname = util.split(name)[1]
        if pats and sname not in pats:
            continue
        ui.write(sname, label=namelabel)
        namelabel = 'shelve.name'
        if ui.quiet:
            ui.write('\n')
            continue
        # pad the name to a 16-column field, the age to a 12-column field;
        # 'used' tracks columns consumed so the description can be ellipsized
        ui.write(' ' * (16 - len(sname)))
        used = 16
        age = '(%s)' % templatefilters.age(util.makedate(mtime), abbrev=True)
        ui.write(age, label='shelve.age')
        ui.write(' ' * (12 - len(age)))
        used += 12
        fp = open(name + '.patch', 'rb')
        try:
            # the first non-'#' line of the patch is the description
            while True:
                line = fp.readline()
                if not line:
                    break
                if not line.startswith('#'):
                    desc = line.rstrip()
                    if ui.formatted():
                        desc = util.ellipsis(desc, width - used)
                    ui.write(desc)
                    break
            ui.write('\n')
            if not (opts['patch'] or opts['stat']):
                continue
            # remaining lines of the file are the diff body
            difflines = fp.readlines()
            if opts['patch']:
                for chunk, label in patch.difflabel(iter, difflines):
                    ui.write(chunk, label=label)
            if opts['stat']:
                for chunk, label in patch.diffstatui(difflines, width=width,
                                                     git=True):
                    ui.write(chunk, label=label)
        finally:
            fp.close()
356
356
def checkparents(repo, state):
    """Abort if the working directory parents no longer match the
    parents recorded in the unshelve state."""
    current = repo.dirstate.parents()
    if current != state.parents:
        raise util.Abort(_('working directory parents do not match unshelve '
                           'state'))
362
362
def pathtofiles(repo, files):
    """Translate repo-relative file names into paths relative to the
    current working directory."""
    curdir = repo.getcwd()
    paths = []
    for fname in files:
        paths.append(repo.pathto(fname, curdir))
    return paths
366
366
def unshelveabort(ui, repo, state, opts):
    """subcommand that abort an in-progress unshelve

    Reuses the rebase machinery's own abort: the saved
    'unshelverebasestate' file is renamed to 'rebasestate' so
    'rebase --abort' can act on it, then the temporary shelve commits
    are stripped and the unshelve state cleared.
    """
    wlock = repo.wlock()
    lock = None
    try:
        checkparents(repo, state)

        # hand our saved rebase state back to the rebase extension
        util.rename(repo.join('unshelverebasestate'),
                    repo.join('rebasestate'))
        try:
            rebase.rebase(ui, repo, **{
                'abort' : True
            })
        except Exception:
            # put the state file back so a later retry still works
            util.rename(repo.join('rebasestate'),
                        repo.join('unshelverebasestate'))
            raise

        lock = repo.lock()

        mergefiles(ui, repo, state.wctx, state.pendingctx)

        # remove the temporary commits created for the unshelve
        repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
        shelvedstate.clear(repo)
        ui.warn(_("unshelve of '%s' aborted\n") % state.name)
    finally:
        lockmod.release(lock, wlock)
394
394
def mergefiles(ui, repo, wctx, shelvectx):
    """updates to wctx and merges the changes from shelvectx into the
    dirstate.

    All output from the update/revert is suppressed (quiet mode plus a
    pushed buffer) so only the caller's own messages are shown.
    """
    oldquiet = ui.quiet
    try:
        ui.quiet = True
        hg.update(repo, wctx.node())
        # files touched by the shelved commit and by its parent commit
        files = []
        files.extend(shelvectx.files())
        files.extend(shelvectx.parents()[0].files())

        # revert will overwrite unknown files, so move them out of the way
        for file in repo.status(unknown=True).unknown:
            if file in files:
                util.rename(file, file + ".orig")
        ui.pushbuffer(True)
        cmdutil.revert(ui, repo, shelvectx, repo.dirstate.parents(),
                       *pathtofiles(repo, files),
                       **{'no_backup': True})
        ui.popbuffer()
    finally:
        ui.quiet = oldquiet
417
417
def unshelvecleanup(ui, repo, name, opts):
    """Delete the files backing shelve 'name', unless --keep was given."""
    if opts['keep']:
        return
    for suffix in ('hg', 'patch'):
        shelvedfile(repo, name, suffix).unlink()
423
423
def unshelvecontinue(ui, repo, state, opts):
    """subcommand to continue an in-progress unshelve

    Resumes after the user resolved merge conflicts: hands the saved
    rebase state back to 'rebase --continue', merges the resulting
    changes into the dirstate, strips the temporary commits and cleans
    up the shelve files.
    """
    # We're finishing off a merge. First parent is our original
    # parent, second is the temporary "fake" commit we're unshelving.
    wlock = repo.wlock()
    lock = None
    try:
        checkparents(repo, state)
        ms = merge.mergestate(repo)
        # refuse to continue while any file is still in unresolved state
        if [f for f in ms if ms[f] == 'u']:
            raise util.Abort(
                _("unresolved conflicts, can't continue"),
                hint=_("see 'hg resolve', then 'hg unshelve --continue'"))

        lock = repo.lock()

        # hand our saved rebase state back to the rebase extension
        util.rename(repo.join('unshelverebasestate'),
                    repo.join('rebasestate'))
        try:
            rebase.rebase(ui, repo, **{
                'continue' : True
            })
        except Exception:
            # put the state file back so a later retry still works
            util.rename(repo.join('rebasestate'),
                        repo.join('unshelverebasestate'))
            raise

        shelvectx = repo['tip']
        if not shelvectx in state.pendingctx.children():
            # rebase was a no-op, so it produced no child commit
            shelvectx = state.pendingctx
        else:
            # only strip the shelvectx if the rebase produced it
            state.stripnodes.append(shelvectx.node())

        mergefiles(ui, repo, state.wctx, shelvectx)

        repair.strip(ui, repo, state.stripnodes, backup=False, topic='shelve')
        shelvedstate.clear(repo)
        unshelvecleanup(ui, repo, state.name, opts)
        ui.status(_("unshelve of '%s' complete\n") % state.name)
    finally:
        lockmod.release(lock, wlock)
467
467
468 @command('unshelve',
468 @command('unshelve',
469 [('a', 'abort', None,
469 [('a', 'abort', None,
470 _('abort an incomplete unshelve operation')),
470 _('abort an incomplete unshelve operation')),
471 ('c', 'continue', None,
471 ('c', 'continue', None,
472 _('continue an incomplete unshelve operation')),
472 _('continue an incomplete unshelve operation')),
473 ('', 'keep', None,
473 ('', 'keep', None,
474 _('keep shelve after unshelving')),
474 _('keep shelve after unshelving')),
475 ('', 'date', '',
475 ('', 'date', '',
476 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
476 _('set date for temporary commits (DEPRECATED)'), _('DATE'))],
477 _('hg unshelve [SHELVED]'))
477 _('hg unshelve [SHELVED]'))
478 def unshelve(ui, repo, *shelved, **opts):
478 def unshelve(ui, repo, *shelved, **opts):
479 """restore a shelved change to the working directory
479 """restore a shelved change to the working directory
480
480
481 This command accepts an optional name of a shelved change to
481 This command accepts an optional name of a shelved change to
482 restore. If none is given, the most recent shelved change is used.
482 restore. If none is given, the most recent shelved change is used.
483
483
484 If a shelved change is applied successfully, the bundle that
484 If a shelved change is applied successfully, the bundle that
485 contains the shelved changes is deleted afterwards.
485 contains the shelved changes is deleted afterwards.
486
486
487 Since you can restore a shelved change on top of an arbitrary
487 Since you can restore a shelved change on top of an arbitrary
488 commit, it is possible that unshelving will result in a conflict
488 commit, it is possible that unshelving will result in a conflict
489 between your changes and the commits you are unshelving onto. If
489 between your changes and the commits you are unshelving onto. If
490 this occurs, you must resolve the conflict, then use
490 this occurs, you must resolve the conflict, then use
491 ``--continue`` to complete the unshelve operation. (The bundle
491 ``--continue`` to complete the unshelve operation. (The bundle
492 will not be deleted until you successfully complete the unshelve.)
492 will not be deleted until you successfully complete the unshelve.)
493
493
494 (Alternatively, you can use ``--abort`` to abandon an unshelve
494 (Alternatively, you can use ``--abort`` to abandon an unshelve
495 that causes a conflict. This reverts the unshelved changes, and
495 that causes a conflict. This reverts the unshelved changes, and
496 does not delete the bundle.)
496 does not delete the bundle.)
497 """
497 """
498 abortf = opts['abort']
498 abortf = opts['abort']
499 continuef = opts['continue']
499 continuef = opts['continue']
500 if not abortf and not continuef:
500 if not abortf and not continuef:
501 cmdutil.checkunfinished(repo)
501 cmdutil.checkunfinished(repo)
502
502
503 if abortf or continuef:
503 if abortf or continuef:
504 if abortf and continuef:
504 if abortf and continuef:
505 raise util.Abort(_('cannot use both abort and continue'))
505 raise util.Abort(_('cannot use both abort and continue'))
506 if shelved:
506 if shelved:
507 raise util.Abort(_('cannot combine abort/continue with '
507 raise util.Abort(_('cannot combine abort/continue with '
508 'naming a shelved change'))
508 'naming a shelved change'))
509
509
510 try:
510 try:
511 state = shelvedstate.load(repo)
511 state = shelvedstate.load(repo)
512 except IOError, err:
512 except IOError, err:
513 if err.errno != errno.ENOENT:
513 if err.errno != errno.ENOENT:
514 raise
514 raise
515 raise util.Abort(_('no unshelve operation underway'))
515 raise util.Abort(_('no unshelve operation underway'))
516
516
517 if abortf:
517 if abortf:
518 return unshelveabort(ui, repo, state, opts)
518 return unshelveabort(ui, repo, state, opts)
519 elif continuef:
519 elif continuef:
520 return unshelvecontinue(ui, repo, state, opts)
520 return unshelvecontinue(ui, repo, state, opts)
521 elif len(shelved) > 1:
521 elif len(shelved) > 1:
522 raise util.Abort(_('can only unshelve one change at a time'))
522 raise util.Abort(_('can only unshelve one change at a time'))
523 elif not shelved:
523 elif not shelved:
524 shelved = listshelves(repo)
524 shelved = listshelves(repo)
525 if not shelved:
525 if not shelved:
526 raise util.Abort(_('no shelved changes to apply!'))
526 raise util.Abort(_('no shelved changes to apply!'))
527 basename = util.split(shelved[0][1])[1]
527 basename = util.split(shelved[0][1])[1]
528 ui.status(_("unshelving change '%s'\n") % basename)
528 ui.status(_("unshelving change '%s'\n") % basename)
529 else:
529 else:
530 basename = shelved[0]
530 basename = shelved[0]
531
531
532 if not shelvedfile(repo, basename, 'patch').exists():
532 if not shelvedfile(repo, basename, 'patch').exists():
533 raise util.Abort(_("shelved change '%s' not found") % basename)
533 raise util.Abort(_("shelved change '%s' not found") % basename)
534
534
535 oldquiet = ui.quiet
535 oldquiet = ui.quiet
536 wlock = lock = tr = None
536 wlock = lock = tr = None
537 try:
537 try:
538 lock = repo.lock()
538 lock = repo.lock()
539 wlock = repo.wlock()
539 wlock = repo.wlock()
540
540
541 tr = repo.transaction('unshelve', report=lambda x: None)
541 tr = repo.transaction('unshelve', report=lambda x: None)
542 oldtiprev = len(repo)
542 oldtiprev = len(repo)
543
543
544 pctx = repo['.']
544 pctx = repo['.']
545 tmpwctx = pctx
545 tmpwctx = pctx
546 # The goal is to have a commit structure like so:
546 # The goal is to have a commit structure like so:
547 # ...-> pctx -> tmpwctx -> shelvectx
547 # ...-> pctx -> tmpwctx -> shelvectx
548 # where tmpwctx is an optional commit with the user's pending changes
548 # where tmpwctx is an optional commit with the user's pending changes
549 # and shelvectx is the unshelved changes. Then we merge it all down
549 # and shelvectx is the unshelved changes. Then we merge it all down
550 # to the original pctx.
550 # to the original pctx.
551
551
552 # Store pending changes in a commit
552 # Store pending changes in a commit
553 s = repo.status()
553 s = repo.status()
554 if s.modified or s.added or s.removed or s.deleted:
554 if s.modified or s.added or s.removed or s.deleted:
555 ui.status(_("temporarily committing pending changes "
555 ui.status(_("temporarily committing pending changes "
556 "(restore with 'hg unshelve --abort')\n"))
556 "(restore with 'hg unshelve --abort')\n"))
557 def commitfunc(ui, repo, message, match, opts):
557 def commitfunc(ui, repo, message, match, opts):
558 hasmq = util.safehasattr(repo, 'mq')
558 hasmq = util.safehasattr(repo, 'mq')
559 if hasmq:
559 if hasmq:
560 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
560 saved, repo.mq.checkapplied = repo.mq.checkapplied, False
561
561
562 backup = repo.ui.backupconfig('phases', 'new-commit')
562 backup = repo.ui.backupconfig('phases', 'new-commit')
563 try:
563 try:
564 repo.ui. setconfig('phases', 'new-commit', phases.secret)
564 repo.ui. setconfig('phases', 'new-commit', phases.secret)
565 return repo.commit(message, 'shelve@localhost',
565 return repo.commit(message, 'shelve@localhost',
566 opts.get('date'), match)
566 opts.get('date'), match)
567 finally:
567 finally:
568 repo.ui.restoreconfig(backup)
568 repo.ui.restoreconfig(backup)
569 if hasmq:
569 if hasmq:
570 repo.mq.checkapplied = saved
570 repo.mq.checkapplied = saved
571
571
572 tempopts = {}
572 tempopts = {}
573 tempopts['message'] = "pending changes temporary commit"
573 tempopts['message'] = "pending changes temporary commit"
574 tempopts['date'] = opts.get('date')
574 tempopts['date'] = opts.get('date')
575 ui.quiet = True
575 ui.quiet = True
576 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
576 node = cmdutil.commit(ui, repo, commitfunc, [], tempopts)
577 tmpwctx = repo[node]
577 tmpwctx = repo[node]
578
578
579 ui.quiet = True
579 ui.quiet = True
580 shelvedfile(repo, basename, 'hg').applybundle()
580 shelvedfile(repo, basename, 'hg').applybundle()
581
581
582 ui.quiet = oldquiet
582 ui.quiet = oldquiet
583
583
584 shelvectx = repo['tip']
584 shelvectx = repo['tip']
585
585
586 # If the shelve is not immediately on top of the commit
586 # If the shelve is not immediately on top of the commit
587 # we'll be merging with, rebase it to be on top.
587 # we'll be merging with, rebase it to be on top.
588 if tmpwctx.node() != shelvectx.parents()[0].node():
588 if tmpwctx.node() != shelvectx.parents()[0].node():
589 ui.status(_('rebasing shelved changes\n'))
589 ui.status(_('rebasing shelved changes\n'))
590 try:
590 try:
591 rebase.rebase(ui, repo, **{
591 rebase.rebase(ui, repo, **{
592 'rev' : [shelvectx.rev()],
592 'rev' : [shelvectx.rev()],
593 'dest' : str(tmpwctx.rev()),
593 'dest' : str(tmpwctx.rev()),
594 'keep' : True,
594 'keep' : True,
595 })
595 })
596 except error.InterventionRequired:
596 except error.InterventionRequired:
597 tr.close()
597 tr.close()
598
598
599 stripnodes = [repo.changelog.node(rev)
599 stripnodes = [repo.changelog.node(rev)
600 for rev in xrange(oldtiprev, len(repo))]
600 for rev in xrange(oldtiprev, len(repo))]
601 shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes)
601 shelvedstate.save(repo, basename, pctx, tmpwctx, stripnodes)
602
602
603 util.rename(repo.join('rebasestate'),
603 util.rename(repo.join('rebasestate'),
604 repo.join('unshelverebasestate'))
604 repo.join('unshelverebasestate'))
605 raise error.InterventionRequired(
605 raise error.InterventionRequired(
606 _("unresolved conflicts (see 'hg resolve', then "
606 _("unresolved conflicts (see 'hg resolve', then "
607 "'hg unshelve --continue')"))
607 "'hg unshelve --continue')"))
608
608
609 # refresh ctx after rebase completes
609 # refresh ctx after rebase completes
610 shelvectx = repo['tip']
610 shelvectx = repo['tip']
611
611
612 if not shelvectx in tmpwctx.children():
612 if not shelvectx in tmpwctx.children():
613 # rebase was a no-op, so it produced no child commit
613 # rebase was a no-op, so it produced no child commit
614 shelvectx = tmpwctx
614 shelvectx = tmpwctx
615
615
616 mergefiles(ui, repo, pctx, shelvectx)
616 mergefiles(ui, repo, pctx, shelvectx)
617 shelvedstate.clear(repo)
617 shelvedstate.clear(repo)
618
618
619 # The transaction aborting will strip all the commits for us,
619 # The transaction aborting will strip all the commits for us,
620 # but it doesn't update the inmemory structures, so addchangegroup
620 # but it doesn't update the inmemory structures, so addchangegroup
621 # hooks still fire and try to operate on the missing commits.
621 # hooks still fire and try to operate on the missing commits.
622 # Clean up manually to prevent this.
622 # Clean up manually to prevent this.
623 repo.unfiltered().changelog.strip(oldtiprev, tr)
623 repo.unfiltered().changelog.strip(oldtiprev, tr)
624
624
625 unshelvecleanup(ui, repo, basename, opts)
625 unshelvecleanup(ui, repo, basename, opts)
626 finally:
626 finally:
627 ui.quiet = oldquiet
627 ui.quiet = oldquiet
628 if tr:
628 if tr:
629 tr.release()
629 tr.release()
630 lockmod.release(lock, wlock)
630 lockmod.release(lock, wlock)
631
631
@command('shelve',
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before shelving')),
          ('', 'cleanup', None,
           _('delete all shelved changes')),
          ('', 'date', '',
           _('shelve with the specified commit date'), _('DATE')),
          ('d', 'delete', None,
           _('delete the named shelved change(s)')),
          ('e', 'edit', False,
           _('invoke editor on commit messages')),
          ('l', 'list', None,
           _('list current shelves')),
          ('m', 'message', '',
           _('use text as shelve message'), _('TEXT')),
          ('n', 'name', '',
           _('use the given name for the shelved commit'), _('NAME')),
          ('p', 'patch', None,
           _('show patch')),
          ('', 'stat', None,
           _('output diffstat-style summary of changes'))] + commands.walkopts,
         _('hg shelve [OPTION]... [FILE]...'))
def shelvecmd(ui, repo, *pats, **opts):
    '''save and set aside changes from the working directory

    Shelving takes files that "hg status" reports as not clean, saves
    the modifications to a bundle (a shelved change), and reverts the
    files so that their state in the working directory becomes clean.

    To restore these changes to the working directory, using "hg
    unshelve"; this will work even if you switch to a different
    commit.

    When no files are specified, "hg shelve" saves all not-clean
    files. If specific files or directories are named, only changes to
    those files are shelved.

    Each shelved change has a name that makes it easier to find later.
    The name of a shelved change defaults to being based on the active
    bookmark, or if there is no active bookmark, the current named
    branch. To specify a different name, use ``--name``.

    To see a list of existing shelved changes, use the ``--list``
    option. For each shelved change, this will print its name, age,
    and description; use ``--patch`` or ``--stat`` for more details.

    To delete specific shelved changes, use ``--delete``. To delete
    all shelved changes, use ``--cleanup``.
    '''
    cmdutil.checkunfinished(repo)

    # option name -> the "action" it belongs to; options from different
    # actions may not be combined on one command line
    allowables = [
        ('addremove', 'create'), # 'create' is pseudo action
        ('cleanup', 'cleanup'),
        # ('date', 'create'), # ignored for passing '--date "0 0"' in tests
        ('delete', 'delete'),
        ('edit', 'create'),
        ('list', 'list'),
        ('message', 'create'),
        ('name', 'create'),
        ('patch', 'list'),
        ('stat', 'list'),
    ]
    def checkopt(opt):
        # True when opt was given (and is compatible with the other
        # supplied options); abort on an incompatible combination
        if not opts[opt]:
            return None
        for other, allowed in allowables:
            if opts[other] and opt != allowed:
                raise util.Abort(_("options '--%s' and '--%s' may not be "
                                   "used together") % (opt, other))
        return True
    if checkopt('cleanup'):
        if pats:
            raise util.Abort(_("cannot specify names when using '--cleanup'"))
        return cleanupcmd(ui, repo)
    elif checkopt('delete'):
        return deletecmd(ui, repo, pats)
    elif checkopt('list'):
        return listcmd(ui, repo, pats, opts)
    else:
        # default action is 'create'; the list-only display options make
        # no sense there
        for listopt in ('patch', 'stat'):
            if opts[listopt]:
                raise util.Abort(_("option '--%s' may not be "
                                   "used when shelving a change") % (listopt,))
        return createcmd(ui, repo, pats, opts)
716
716
def extsetup(ui):
    """Register the in-progress-unshelve state with cmdutil.

    Listing it in cmdutil.unfinishedstates makes other commands refuse to
    run while an unshelve is interrupted.
    """
    entry = [shelvedstate._filename, False, False,
             _('unshelve already in progress'),
             _("use 'hg unshelve --continue' or 'hg unshelve --abort'")]
    cmdutil.unfinishedstates.append(entry)
@@ -1,460 +1,460 b''
1 # Mercurial bookmark support code
1 # Mercurial bookmark support code
2 #
2 #
3 # Copyright 2008 David Soria Parra <dsp@php.net>
3 # Copyright 2008 David Soria Parra <dsp@php.net>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial.node import hex, bin
10 from mercurial.node import hex, bin
11 from mercurial import encoding, error, util, obsolete, lock as lockmod
11 from mercurial import encoding, error, util, obsolete, lock as lockmod
12 import errno
12 import errno
13
13
14 class bmstore(dict):
14 class bmstore(dict):
15 """Storage for bookmarks.
15 """Storage for bookmarks.
16
16
17 This object should do all bookmark reads and writes, so that it's
17 This object should do all bookmark reads and writes, so that it's
18 fairly simple to replace the storage underlying bookmarks without
18 fairly simple to replace the storage underlying bookmarks without
19 having to clone the logic surrounding bookmarks.
19 having to clone the logic surrounding bookmarks.
20
20
21 This particular bmstore implementation stores bookmarks as
21 This particular bmstore implementation stores bookmarks as
22 {hash}\s{name}\n (the same format as localtags) in
22 {hash}\s{name}\n (the same format as localtags) in
23 .hg/bookmarks. The mapping is stored as {name: nodeid}.
23 .hg/bookmarks. The mapping is stored as {name: nodeid}.
24
24
25 This class does NOT handle the "current" bookmark state at this
25 This class does NOT handle the "current" bookmark state at this
26 time.
26 time.
27 """
27 """
28
28
29 def __init__(self, repo):
29 def __init__(self, repo):
30 dict.__init__(self)
30 dict.__init__(self)
31 self._repo = repo
31 self._repo = repo
32 try:
32 try:
33 bkfile = self.getbkfile(repo)
33 bkfile = self.getbkfile(repo)
34 for line in bkfile:
34 for line in bkfile:
35 line = line.strip()
35 line = line.strip()
36 if not line:
36 if not line:
37 continue
37 continue
38 if ' ' not in line:
38 if ' ' not in line:
39 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
39 repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n')
40 % line)
40 % line)
41 continue
41 continue
42 sha, refspec = line.split(' ', 1)
42 sha, refspec = line.split(' ', 1)
43 refspec = encoding.tolocal(refspec)
43 refspec = encoding.tolocal(refspec)
44 try:
44 try:
45 self[refspec] = repo.changelog.lookup(sha)
45 self[refspec] = repo.changelog.lookup(sha)
46 except LookupError:
46 except LookupError:
47 pass
47 pass
48 except IOError, inst:
48 except IOError, inst:
49 if inst.errno != errno.ENOENT:
49 if inst.errno != errno.ENOENT:
50 raise
50 raise
51
51
52 def getbkfile(self, repo):
52 def getbkfile(self, repo):
53 bkfile = None
53 bkfile = None
54 if 'HG_PENDING' in os.environ:
54 if 'HG_PENDING' in os.environ:
55 try:
55 try:
56 bkfile = repo.vfs('bookmarks.pending')
56 bkfile = repo.vfs('bookmarks.pending')
57 except IOError, inst:
57 except IOError, inst:
58 if inst.errno != errno.ENOENT:
58 if inst.errno != errno.ENOENT:
59 raise
59 raise
60 if bkfile is None:
60 if bkfile is None:
61 bkfile = repo.vfs('bookmarks')
61 bkfile = repo.vfs('bookmarks')
62 return bkfile
62 return bkfile
63
63
64 def recordchange(self, tr):
64 def recordchange(self, tr):
65 """record that bookmarks have been changed in a transaction
65 """record that bookmarks have been changed in a transaction
66
66
67 The transaction is then responsible for updating the file content."""
67 The transaction is then responsible for updating the file content."""
68 tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
68 tr.addfilegenerator('bookmarks', ('bookmarks',), self._write,
69 location='plain')
69 location='plain')
70 tr.hookargs['bookmark_moved'] = '1'
70 tr.hookargs['bookmark_moved'] = '1'
71
71
72 def write(self):
72 def write(self):
73 '''Write bookmarks
73 '''Write bookmarks
74
74
75 Write the given bookmark => hash dictionary to the .hg/bookmarks file
75 Write the given bookmark => hash dictionary to the .hg/bookmarks file
76 in a format equal to those of localtags.
76 in a format equal to those of localtags.
77
77
78 We also store a backup of the previous state in undo.bookmarks that
78 We also store a backup of the previous state in undo.bookmarks that
79 can be copied back on rollback.
79 can be copied back on rollback.
80 '''
80 '''
81 repo = self._repo
81 repo = self._repo
82 self._writerepo(repo)
82 self._writerepo(repo)
83
83
84 def _writerepo(self, repo):
84 def _writerepo(self, repo):
85 """Factored out for extensibility"""
85 """Factored out for extensibility"""
86 if repo._bookmarkcurrent not in self:
86 if repo._bookmarkcurrent not in self:
87 unsetcurrent(repo)
87 unsetcurrent(repo)
88
88
89 wlock = repo.wlock()
89 wlock = repo.wlock()
90 try:
90 try:
91
91
92 file = repo.vfs('bookmarks', 'w', atomictemp=True)
92 file = repo.vfs('bookmarks', 'w', atomictemp=True)
93 self._write(file)
93 self._write(file)
94 file.close()
94 file.close()
95
95
96 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
96 # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
97 try:
97 try:
98 repo.svfs.utime('00changelog.i', None)
98 repo.svfs.utime('00changelog.i', None)
99 except OSError:
99 except OSError:
100 pass
100 pass
101
101
102 finally:
102 finally:
103 wlock.release()
103 wlock.release()
104
104
105 def _write(self, fp):
105 def _write(self, fp):
106 for name, node in self.iteritems():
106 for name, node in self.iteritems():
107 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
107 fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
108
108
109 def readcurrent(repo):
109 def readcurrent(repo):
110 '''Get the current bookmark
110 '''Get the current bookmark
111
111
112 If we use gittish branches we have a current bookmark that
112 If we use gittish branches we have a current bookmark that
113 we are on. This function returns the name of the bookmark. It
113 we are on. This function returns the name of the bookmark. It
114 is stored in .hg/bookmarks.current
114 is stored in .hg/bookmarks.current
115 '''
115 '''
116 mark = None
116 mark = None
117 try:
117 try:
118 file = repo.opener('bookmarks.current')
118 file = repo.vfs('bookmarks.current')
119 except IOError, inst:
119 except IOError, inst:
120 if inst.errno != errno.ENOENT:
120 if inst.errno != errno.ENOENT:
121 raise
121 raise
122 return None
122 return None
123 try:
123 try:
124 # No readline() in osutil.posixfile, reading everything is cheap
124 # No readline() in osutil.posixfile, reading everything is cheap
125 mark = encoding.tolocal((file.readlines() or [''])[0])
125 mark = encoding.tolocal((file.readlines() or [''])[0])
126 if mark == '' or mark not in repo._bookmarks:
126 if mark == '' or mark not in repo._bookmarks:
127 mark = None
127 mark = None
128 finally:
128 finally:
129 file.close()
129 file.close()
130 return mark
130 return mark
131
131
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    if mark not in repo._bookmarks:
        raise AssertionError('bookmark %s does not exist!' % mark)

    if repo._bookmarkcurrent == mark:
        # already active, nothing to record
        return

    wlock = repo.wlock()
    try:
        fp = repo.vfs('bookmarks.current', 'w', atomictemp=True)
        fp.write(encoding.fromlocal(mark))
        fp.close()
    finally:
        wlock.release()
    repo._bookmarkcurrent = mark
153
153
154 def unsetcurrent(repo):
154 def unsetcurrent(repo):
155 wlock = repo.wlock()
155 wlock = repo.wlock()
156 try:
156 try:
157 try:
157 try:
158 repo.vfs.unlink('bookmarks.current')
158 repo.vfs.unlink('bookmarks.current')
159 repo._bookmarkcurrent = None
159 repo._bookmarkcurrent = None
160 except OSError, inst:
160 except OSError, inst:
161 if inst.errno != errno.ENOENT:
161 if inst.errno != errno.ENOENT:
162 raise
162 raise
163 finally:
163 finally:
164 wlock.release()
164 wlock.release()
165
165
def iscurrent(repo, mark=None, parents=None):
    '''Tell whether the current bookmark is also active

    I.e., the bookmark listed in .hg/bookmarks.current also points to a
    parent of the working directory.
    '''
    if not mark:
        mark = repo._bookmarkcurrent
    if not parents:
        parents = [p.node() for p in repo[None].parents()]
    marks = repo._bookmarks
    active = mark in marks and marks[mark] in parents
    return active
178
178
def updatecurrentbookmark(repo, oldnode, curbranch):
    """Move the active bookmark from oldnode to the tip of curbranch."""
    try:
        return update(repo, oldnode, repo.branchtip(curbranch))
    except error.RepoLookupError:
        if curbranch != "default":
            raise util.Abort(_("branch %s not found") % curbranch)
        # no default branch! fall back to the repository tip
        return update(repo, oldnode, repo.lookup("tip"))
187
187
def deletedivergent(repo, deletefrom, bm):
    '''Delete divergent versions of bm on nodes in deletefrom.

    Return True if at least one bookmark was deleted, False otherwise.'''
    deleted = False
    marks = repo._bookmarks
    # candidates share bm's base name (the part before any '@' suffix)
    base = bm.split('@', 1)[0]
    for mark in [b for b in marks if b.split('@', 1)[0] == base]:
        if mark == '@' or '@' not in mark:
            # can't be divergent by definition
            continue
        if mark != bm and mark and marks[mark] in deletefrom:
            del marks[mark]
            deleted = True
    return deleted
204
204
def calculateupdate(ui, repo, checkout):
    '''Return a tuple (targetrev, movemarkfrom) indicating the rev to
    check out and where to move the active bookmark from, if needed.'''
    movemarkfrom = None
    if checkout is None:
        active = repo._bookmarkcurrent
        if iscurrent(repo):
            # active bookmark is on a working-dir parent: it should be
            # moved along with the update
            movemarkfrom = repo['.'].node()
        elif active:
            ui.status(_("updating to active bookmark %s\n") % active)
            checkout = active
    return (checkout, movemarkfrom)
217
217
def update(repo, parents, node):
    """Advance the active bookmark to node and prune divergent copies.

    Returns True (and writes the bookmark store) when anything changed.
    """
    marks = repo._bookmarks
    cur = repo._bookmarkcurrent
    if not cur:
        return False

    changed = False
    deletefrom = parents
    if marks[cur] in parents:
        new = repo[node]
        # bookmarks sharing cur's base name are divergence candidates
        basename = cur.split('@', 1)[0]
        divs = [repo[b] for b in marks if b.split('@', 1)[0] == basename]
        anc = repo.changelog.ancestors([new.rev()])
        deletefrom = [b.node() for b in divs if b.rev() in anc or b == new]
        if validdest(repo, repo[marks[cur]], new):
            marks[cur] = new.node()
            changed = True

    if deletedivergent(repo, deletefrom, cur):
        changed = True

    if changed:
        marks.write()
    return changed
242
242
def listbookmarks(repo):
    '''Return a {name: hex node} dict of the repo's bookmarks.

    Bookmarks pointing at unknown nodes and local divergent bookmarks
    (names with '@' anywhere but at the end) are omitted.'''
    # We may try to list bookmarks on a repo type that does not
    # support it (e.g., statichttprepository).
    marks = getattr(repo, '_bookmarks', {})

    d = {}
    hasnode = repo.changelog.hasnode
    for k, v in marks.iteritems():
        # don't expose local divergent bookmarks
        if hasnode(v) and ('@' not in k or k.endswith('@')):
            d[k] = hex(v)
    return d
255
255
def pushbookmark(repo, key, old, new):
    '''Move bookmark "key" from hex node "old" to hex node "new".

    Runs under wlock + lock inside a transaction.  An empty "new"
    deletes the bookmark.  Returns False when the bookmark's current
    value matches neither "old" nor "new" (somebody else moved it), or
    when "new" is unknown locally; True on success.'''
    w = l = tr = None
    try:
        w = repo.wlock()
        l = repo.lock()
        tr = repo.transaction('bookmarks')
        marks = repo._bookmarks
        # hex('') == '' serves as the "bookmark absent" sentinel
        existing = hex(marks.get(key, ''))
        if existing != old and existing != new:
            return False
        if new == '':
            del marks[key]
        else:
            if new not in repo:
                return False
            marks[key] = repo[new].node()
        marks.recordchange(tr)
        tr.close()
        return True
    finally:
        # release in reverse acquisition order; tr may be None on error
        lockmod.release(tr, l, w)
277
277
def compare(repo, srcmarks, dstmarks,
            srchex=None, dsthex=None, targets=None):
    '''Compare bookmarks between srcmarks and dstmarks

    This returns tuple "(addsrc, adddst, advsrc, advdst, diverge,
    differ, invalid, same)", each are list of bookmarks below:

    :addsrc: added on src side (removed on dst side, perhaps)
    :adddst: added on dst side (removed on src side, perhaps)
    :advsrc: advanced on src side
    :advdst: advanced on dst side
    :diverge: diverge
    :differ: changed, but changeset referred on src is unknown on dst
    :invalid: unknown on both side
    :same: same on both side

    Each elements of lists in result tuple is tuple "(bookmark name,
    changeset ID on source side, changeset ID on destination
    side)". Each changeset IDs are 40 hexadecimal digit string or
    None.

    Changeset IDs of tuples in "addsrc", "adddst", "differ" or
    "invalid" list may be unknown for repo.

    This function expects that "srcmarks" and "dstmarks" return
    changeset ID in 40 hexadecimal digit string for specified
    bookmark. If not so (e.g. bmstore "repo._bookmarks" returning
    binary value), "srchex" or "dsthex" should be specified to convert
    into such form.

    If "targets" is specified, only bookmarks listed in it are
    examined.
    '''
    # identity converters by default: marks already map to hex strings
    if not srchex:
        srchex = lambda x: x
    if not dsthex:
        dsthex = lambda x: x

    if targets:
        bset = set(targets)
    else:
        srcmarkset = set(srcmarks)
        dstmarkset = set(dstmarks)
        bset = srcmarkset | dstmarkset

    results = ([], [], [], [], [], [], [], [])
    addsrc = results[0].append
    adddst = results[1].append
    advsrc = results[2].append
    advdst = results[3].append
    diverge = results[4].append
    differ = results[5].append
    invalid = results[6].append
    same = results[7].append

    for b in sorted(bset):
        if b not in srcmarks:
            if b in dstmarks:
                adddst((b, None, dsthex(dstmarks[b])))
            else:
                invalid((b, None, None))
        elif b not in dstmarks:
            addsrc((b, srchex(srcmarks[b]), None))
        else:
            scid = srchex(srcmarks[b])
            dcid = dsthex(dstmarks[b])
            if scid == dcid:
                same((b, scid, dcid))
            elif scid in repo and dcid in repo:
                sctx = repo[scid]
                dctx = repo[dcid]
                if sctx.rev() < dctx.rev():
                    if validdest(repo, sctx, dctx):
                        advdst((b, scid, dcid))
                    else:
                        diverge((b, scid, dcid))
                else:
                    if validdest(repo, dctx, sctx):
                        advsrc((b, scid, dcid))
                    else:
                        diverge((b, scid, dcid))
            else:
                # it is too expensive to examine in detail, in this case
                differ((b, scid, dcid))

    return results
364
364
365 def _diverge(ui, b, path, localmarks):
365 def _diverge(ui, b, path, localmarks):
366 if b == '@':
366 if b == '@':
367 b = ''
367 b = ''
368 # find a unique @ suffix
368 # find a unique @ suffix
369 for x in range(1, 100):
369 for x in range(1, 100):
370 n = '%s@%d' % (b, x)
370 n = '%s@%d' % (b, x)
371 if n not in localmarks:
371 if n not in localmarks:
372 break
372 break
373 # try to use an @pathalias suffix
373 # try to use an @pathalias suffix
374 # if an @pathalias already exists, we overwrite (update) it
374 # if an @pathalias already exists, we overwrite (update) it
375 if path.startswith("file:"):
375 if path.startswith("file:"):
376 path = util.url(path).path
376 path = util.url(path).path
377 for p, u in ui.configitems("paths"):
377 for p, u in ui.configitems("paths"):
378 if u.startswith("file:"):
378 if u.startswith("file:"):
379 u = util.url(u).path
379 u = util.url(u).path
380 if path == u:
380 if path == u:
381 n = '%s@%s' % (b, p)
381 n = '%s@%s' % (b, p)
382 return n
382 return n
383
383
def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()):
    '''Import bookmark changes from remotemarks into the local store.

    "explicit" names bookmarks the user explicitly requested; divergent
    or backwards moves of those are imported as-is instead of being
    renamed/skipped.  Changes are recorded through a transaction
    obtained from "trfunc".'''
    ui.debug("checking for updated bookmarks\n")
    localmarks = repo._bookmarks
    (addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same
     ) = compare(repo, remotemarks, localmarks, dsthex=hex)

    status = ui.status
    warn = ui.warn
    if ui.configbool('ui', 'quietbookmarkmove', False):
        # demote both channels to debug when quiet moves are requested
        status = warn = ui.debug

    explicit = set(explicit)
    # list of (name, new binary node, output function, message) tuples
    changed = []
    for b, scid, dcid in addsrc:
        if scid in repo: # add remote bookmarks for changes we already have
            changed.append((b, bin(scid), status,
                            _("adding remote bookmark %s\n") % (b)))
    for b, scid, dcid in advsrc:
        changed.append((b, bin(scid), status,
                        _("updating bookmark %s\n") % (b)))
    # remove normal movement from explicit set
    explicit.difference_update(d[0] for d in changed)

    for b, scid, dcid in diverge:
        if b in explicit:
            explicit.discard(b)
            changed.append((b, bin(scid), status,
                            _("importing bookmark %s\n") % (b)))
        else:
            # not explicitly requested: store under a divergent name
            db = _diverge(ui, b, path, localmarks)
            changed.append((db, bin(scid), warn,
                            _("divergent bookmark %s stored as %s\n")
                            % (b, db)))
    for b, scid, dcid in adddst + advdst:
        if b in explicit:
            explicit.discard(b)
            changed.append((b, bin(scid), status,
                            _("importing bookmark %s\n") % (b)))

    if changed:
        tr = trfunc()
        for b, node, writer, msg in sorted(changed):
            localmarks[b] = node
            writer(msg)
        localmarks.recordchange(tr)
429
429
def diff(ui, dst, src):
    '''Print bookmarks present in src but missing from dst.

    Returns 0 when at least one such bookmark was found, 1 otherwise.'''
    ui.status(_("searching for changed bookmarks\n"))

    srcmarks = src.listkeys('bookmarks')
    dstmarks = dst.listkeys('bookmarks')

    missing = sorted(set(srcmarks) - set(dstmarks))
    for name in missing:
        if ui.debugflag:
            node = srcmarks[name]
        else:
            node = srcmarks[name][:12]
        ui.write(" %-25s %s\n" % (name, node))

    if not missing:
        ui.status(_("no changed bookmarks found\n"))
        return 1
    return 0
445
445
def validdest(repo, old, new):
    """Is the new bookmark destination a valid update from the old one"""
    repo = repo.unfiltered()
    if old == new:
        # old == new -> nothing to update
        return False
    if not old:
        # old is nullrev, anything is valid
        # (new != nullrev has been excluded by the previous check)
        return True
    if repo.obsstore:
        # with obsolescence data, valid destinations are the successors
        # (the "foreground") of the old location
        return new.node() in obsolete.foreground(repo, [old.node()])
    # kept as a separate clause as it is lazier (and therefore faster)
    return old.descendant(new)
@@ -1,451 +1,451 b''
1 # branchmap.py - logic to computes, maintain and stores branchmap for local repo
1 # branchmap.py - logic to computes, maintain and stores branchmap for local repo
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev
8 from node import bin, hex, nullid, nullrev
9 import encoding
9 import encoding
10 import util
10 import util
11 import time
11 import time
12 from array import array
12 from array import array
13 from struct import calcsize, pack, unpack
13 from struct import calcsize, pack, unpack
14
14
15 def _filename(repo):
15 def _filename(repo):
16 """name of a branchcache file for a given repo or repoview"""
16 """name of a branchcache file for a given repo or repoview"""
17 filename = "cache/branch2"
17 filename = "cache/branch2"
18 if repo.filtername:
18 if repo.filtername:
19 filename = '%s-%s' % (filename, repo.filtername)
19 filename = '%s-%s' % (filename, repo.filtername)
20 return filename
20 return filename
21
21
def read(repo):
    '''Read the on-disk branch cache for repo and return a branchcache
    object, or None when the cache is missing or invalid.'''
    try:
        f = repo.vfs(_filename(repo))
        lines = f.read().split('\n')
        f.close()
    except (IOError, OSError):
        # no cache file yet (or unreadable): caller rebuilds from scratch
        return None

    try:
        # first line is the cache key: "<tipnode> <tiprev> [filteredhash]"
        cachekey = lines.pop(0).split(" ", 2)
        last, lrev = cachekey[:2]
        last, lrev = bin(last), int(lrev)
        filteredhash = None
        if len(cachekey) > 2:
            filteredhash = bin(cachekey[2])
        partial = branchcache(tipnode=last, tiprev=lrev,
                              filteredhash=filteredhash)
        if not partial.validfor(repo):
            # invalidate the cache
            raise ValueError('tip differs')
        # remaining lines: "<headnode> <o|c> <branch label>"
        for l in lines:
            if not l:
                continue
            node, state, label = l.split(" ", 2)
            if state not in 'oc':
                raise ValueError('invalid branch state')
            label = encoding.tolocal(label.strip())
            if not node in repo:
                raise ValueError('node %s does not exist' % node)
            node = bin(node)
            partial.setdefault(label, []).append(node)
            if state == 'c':
                partial._closednodes.add(node)
    except KeyboardInterrupt:
        raise
    except Exception, inst:
        # any parse/validation failure just discards the cache
        if repo.ui.debugflag:
            msg = 'invalid branchheads cache'
            if repo.filtername is not None:
                msg += ' (%s)' % repo.filtername
            msg += ': %s\n'
            repo.ui.debug(msg % inst)
        partial = None
    return partial
66
66
### Nearest subset relation
# Nearest subset of filter X is a filter Y so that:
# * Y is included in X,
# * X - Y is as small as possible.
# This creates an ordering used for branchmap purposes.
# The ordering may be partial.
subsettable = {None: 'visible',
               'visible': 'served',
               'served': 'immutable',
               'immutable': 'base'}
77
77
def updatecache(repo):
    '''Bring the branch cache for repo's current filter level up to date
    and store it in repo._branchcaches.'''
    cl = repo.changelog
    filtername = repo.filtername
    partial = repo._branchcaches.get(filtername)

    revs = []
    if partial is None or not partial.validfor(repo):
        # no usable in-memory cache: fall back to the on-disk one
        partial = read(repo)
        if partial is None:
            subsetname = subsettable.get(filtername)
            if subsetname is None:
                # no nearer subset filter to seed from: start empty
                partial = branchcache()
            else:
                # seed from the nearest subset's cache, then catch up on
                # revisions visible here but filtered out there
                subset = repo.filtered(subsetname)
                partial = subset.branchmap().copy()
                extrarevs = subset.changelog.filteredrevs - cl.filteredrevs
                revs.extend(r for r in extrarevs if r <= partial.tiprev)
    # process everything newer than the cached tip
    revs.extend(cl.revs(start=partial.tiprev + 1))
    if revs:
        partial.update(repo, revs)
        partial.write(repo)
    assert partial.validfor(repo), filtername
    repo._branchcaches[repo.filtername] = partial
101
101
class branchcache(dict):
    """A dict like object that hold branches heads cache.

    This cache is used to avoid costly computations to determine all the
    branch heads of a repo.

    The cache is serialized on disk in the following format:

    <tip hex node> <tip rev number> [optional filtered repo hex hash]
    <branch head hex node> <open/closed state> <branch name>
    <branch head hex node> <open/closed state> <branch name>
    ...

    The first line is used to check if the cache is still valid. If the
    branch cache is for a filtered repo view, an optional third hash is
    included that hashes the hashes of all filtered revisions.

    The open/closed state is represented by a single letter 'o' or 'c'.
    This field can be used to avoid changelog reads when determining if a
    branch head closes a branch or not.
    """

    def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev,
                 filteredhash=None, closednodes=None):
        super(branchcache, self).__init__(entries)
        self.tipnode = tipnode
        self.tiprev = tiprev
        self.filteredhash = filteredhash
        # closednodes is a set of nodes that close their branch. If the branch
        # cache has been updated, it may contain nodes that are no longer
        # heads.
        if closednodes is None:
            self._closednodes = set()
        else:
            self._closednodes = closednodes
        # lazily created by update(); flushed to disk by write()
        self._revbranchcache = None

    def _hashfiltered(self, repo):
        """build hash of revision filtered in the current cache

        Tracking tipnode and tiprev is not enough to ensure validity of the
        cache as they do not help to distinct cache that ignored various
        revision bellow tiprev.

        To detect such difference, we build a cache of all ignored revisions.
        """
        cl = repo.changelog
        if not cl.filteredrevs:
            return None
        key = None
        revs = sorted(r for r in cl.filteredrevs if r <= self.tiprev)
        if revs:
            s = util.sha1()
            for rev in revs:
                s.update('%s;' % rev)
            key = s.digest()
        return key

    def validfor(self, repo):
        """Is the cache content valid regarding a repo

        - False when cached tipnode is unknown or if we detect a strip.
        - True when cache is up to date or a subset of current repo."""
        try:
            return ((self.tipnode == repo.changelog.node(self.tiprev))
                    and (self.filteredhash == self._hashfiltered(repo)))
        except IndexError:
            # tiprev no longer exists (e.g. after a strip)
            return False

    def _branchtip(self, heads):
        '''Return tuple with last open head in heads and false,
        otherwise return last closed head and true.'''
        tip = heads[-1]
        closed = True
        for h in reversed(heads):
            if h not in self._closednodes:
                tip = h
                closed = False
                break
        return tip, closed

    def branchtip(self, branch):
        '''Return the tipmost open head on branch head, otherwise return the
        tipmost closed head on branch.
        Raise KeyError for unknown branch.'''
        return self._branchtip(self[branch])[0]

    def branchheads(self, branch, closed=False):
        '''Return the heads of branch; closed heads only when closed=True.'''
        heads = self[branch]
        if not closed:
            heads = [h for h in heads if h not in self._closednodes]
        return heads

    def iterbranches(self):
        '''Yield (branchname, heads, tipnode, isclosed) tuples.'''
        for bn, heads in self.iteritems():
            yield (bn, heads) + self._branchtip(heads)

    def copy(self):
        """return an deep copy of the branchcache object"""
        return branchcache(self, self.tipnode, self.tiprev, self.filteredhash,
                           self._closednodes)

    def write(self, repo):
        '''Serialize the cache (and any pending rev branch cache) to disk;
        write failures are logged and otherwise ignored.'''
        try:
            f = repo.vfs(_filename(repo), "w", atomictemp=True)
            cachekey = [hex(self.tipnode), str(self.tiprev)]
            if self.filteredhash is not None:
                cachekey.append(hex(self.filteredhash))
            f.write(" ".join(cachekey) + '\n')
            nodecount = 0
            for label, nodes in sorted(self.iteritems()):
                for node in nodes:
                    nodecount += 1
                    if node in self._closednodes:
                        state = 'c'
                    else:
                        state = 'o'
                    f.write("%s %s %s\n" % (hex(node), state,
                                            encoding.fromlocal(label)))
            f.close()
            repo.ui.log('branchcache',
                        'wrote %s branch cache with %d labels and %d nodes\n',
                        repo.filtername, len(self), nodecount)
        except (IOError, OSError, util.Abort), inst:
            repo.ui.debug("couldn't write branch cache: %s\n" % inst)
            # Abort may be raise by read only opener
            pass
        if self._revbranchcache:
            self._revbranchcache.write(repo.unfiltered())
            self._revbranchcache = None

    def update(self, repo, revgen):
        """Given a branchhead cache, self, that may have extra nodes or be
        missing heads, and a generator of nodes that are strictly a superset of
        heads missing, this function updates self to be correct.
        """
        starttime = time.time()
        cl = repo.changelog
        # collect new branch entries
        newbranches = {}
        urepo = repo.unfiltered()
        self._revbranchcache = revbranchcache(urepo)
        getbranchinfo = self._revbranchcache.branchinfo
        ucl = urepo.changelog
        for r in revgen:
            branch, closesbranch = getbranchinfo(ucl, r)
            newbranches.setdefault(branch, []).append(r)
            if closesbranch:
                self._closednodes.add(cl.node(r))

        # fetch current topological heads to speed up filtering
        topoheads = set(cl.headrevs())

        # if older branchheads are reachable from new ones, they aren't
        # really branchheads. Note checking parents is insufficient:
        # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
        for branch, newheadrevs in newbranches.iteritems():
            bheads = self.setdefault(branch, [])
            bheadset = set(cl.rev(node) for node in bheads)

            # This have been tested True on all internal usage of this function.
            # run it again in case of doubt
            # assert not (set(bheadrevs) & set(newheadrevs))
            newheadrevs.sort()
            bheadset.update(newheadrevs)

            # This prunes out two kinds of heads - heads that are superseded by
            # a head in newheadrevs, and newheadrevs that are not heads because
            # an existing head is their descendant.
            uncertain = bheadset - topoheads
            if uncertain:
                floorrev = min(uncertain)
                ancestors = set(cl.ancestors(newheadrevs, floorrev))
                bheadset -= ancestors
            bheadrevs = sorted(bheadset)
            self[branch] = [cl.node(rev) for rev in bheadrevs]
            tiprev = bheadrevs[-1]
            if tiprev > self.tiprev:
                self.tipnode = cl.node(tiprev)
                self.tiprev = tiprev

        if not self.validfor(repo):
            # cache key are not valid anymore
            self.tipnode = nullid
            self.tiprev = nullrev
            for heads in self.values():
                tiprev = max(cl.rev(node) for node in heads)
                if tiprev > self.tiprev:
                    self.tipnode = cl.node(tiprev)
                    self.tiprev = tiprev
        self.filteredhash = self._hashfiltered(repo)

        duration = time.time() - starttime
        repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n',
                    repo.filtername, duration)
297
297
# Revision branch info cache
#
# Constants describing the on-disk format of the persistent revision
# branch cache (see the revbranchcache class below). These values are a
# file-format contract; changing any of them requires bumping _rbcversion.

# version suffix appended to both cache file names, so an incompatible
# format change simply starts a fresh cache under a new name
_rbcversion = '-v1'
# file holding the '\0'-separated branch name list (append-only)
_rbcnames = 'cache/rbc-names' + _rbcversion
# file holding one fixed-size record per revision
_rbcrevs = 'cache/rbc-revs' + _rbcversion
# [4 byte hash prefix][4 byte branch name number with sign bit indicating open]
_rbcrecfmt = '>4sI'
# size in bytes of one rbc-revs record
_rbcrecsize = calcsize(_rbcrecfmt)
# number of leading node-hash bytes stored per record for validation
_rbcnodelen = 4
# low 31 bits of the second record field: index into the branch name list
_rbcbranchidxmask = 0x7fffffff
# high bit of the second record field: set when the commit closes its branch
_rbccloseflag = 0x80000000
309
309
class revbranchcache(object):
    """Persistent cache, mapping from revision number to branch name and close.
    This is a low level cache, independent of filtering.

    Branch names are stored in rbc-names in internal encoding separated by 0.
    rbc-names is append-only, and each branch name is only stored once and will
    thus have a unique index.

    The branch info for each revision is stored in rbc-revs as constant size
    records. The whole file is read into memory, but it is only 'parsed' on
    demand. The file is usually append-only but will be truncated if repo
    modification is detected.
    The record for each revision contains the first 4 bytes of the
    corresponding node hash, and the record is only used if it still matches.
    Even a completely trashed rbc-revs fill thus still give the right result
    while converging towards full recovery ... assuming no incorrectly matching
    node hashes.
    The record also contains 4 bytes where 31 bits contains the index of the
    branch and the last bit indicate that it is a branch close commit.
    The usage pattern for rbc-revs is thus somewhat similar to 00changelog.i
    and will grow with it but be 1/8th of its size.
    """

    def __init__(self, repo):
        """Load cached names and records from repo.vfs.

        Missing or unreadable cache files are tolerated: the cache then
        simply starts out empty and is rebuilt on demand.
        """
        # must be constructed on the unfiltered repo: this cache is
        # independent of filtering (see class docstring)
        assert repo.filtername is None
        self._names = [] # branch names in local encoding with static index
        self._rbcrevs = array('c') # structs of type _rbcrecfmt
        self._rbcsnameslen = 0 # size of rbc-names as last read from disk
        try:
            bndata = repo.vfs.read(_rbcnames)
            self._rbcsnameslen = len(bndata) # for verification before writing
            self._names = [encoding.tolocal(bn) for bn in bndata.split('\0')]
        except (IOError, OSError), inst:
            # best effort: an unreadable cache just means starting empty
            repo.ui.debug("couldn't read revision branch cache names: %s\n" %
                          inst)
        # records are useless without the name list they index into, so
        # only read rbc-revs when names loaded successfully
        if self._names:
            try:
                data = repo.vfs.read(_rbcrevs)
                self._rbcrevs.fromstring(data)
            except (IOError, OSError), inst:
                repo.ui.debug("couldn't read revision branch cache: %s\n" %
                              inst)
        # remember number of good records on disk
        self._rbcrevslen = min(len(self._rbcrevs) // _rbcrecsize,
                               len(repo.changelog))
        if self._rbcrevslen == 0:
            # no usable records: discard the names too so write() will
            # rewrite both files from scratch
            self._names = []
        self._rbcnamescount = len(self._names) # number of good names on disk
        # reverse mapping branch name -> index, for fast lookup when caching
        self._namesreverse = dict((b, r) for r, b in enumerate(self._names))

    def branchinfo(self, changelog, rev):
        """Return branch name and close flag for rev, using and updating
        persistent cache.

        Returns a (branchname, closesbranch) tuple. Falls back to reading
        the changelog (and repopulating the cache) when the cached record
        does not match the revision's node hash.
        """
        rbcrevidx = rev * _rbcrecsize

        # if requested rev is missing, add and populate all missing revs
        if len(self._rbcrevs) < rbcrevidx + _rbcrecsize:
            first = len(self._rbcrevs) // _rbcrecsize
            # grow the in-memory record array to cover the whole changelog
            self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize -
                                         len(self._rbcrevs)))
            for r in xrange(first, len(changelog)):
                self._branchinfo(changelog, r)

        # fast path: extract data from cache, use it if node is matching
        reponode = changelog.node(rev)[:_rbcnodelen]
        # buffer() avoids copying the record slice out of the array
        cachenode, branchidx = unpack(
            _rbcrecfmt, buffer(self._rbcrevs, rbcrevidx, _rbcrecsize))
        close = bool(branchidx & _rbccloseflag)
        if close:
            branchidx &= _rbcbranchidxmask
        if cachenode == reponode:
            return self._names[branchidx], close
        # fall back to slow path and make sure it will be written to disk
        self._rbcrevslen = min(self._rbcrevslen, rev)
        return self._branchinfo(changelog, rev)

    def _branchinfo(self, changelog, rev):
        """Retrieve branch info from changelog and update _rbcrevs.

        Slow path: reads the revision's branch info from the changelog,
        registers the branch name if new, and stores the record in the
        in-memory array. Returns the same (branchname, closesbranch)
        tuple as branchinfo().
        """
        b, close = changelog.branchinfo(rev)
        if b in self._namesreverse:
            branchidx = self._namesreverse[b]
        else:
            # first time this branch name is seen: append it (rbc-names is
            # append-only, so existing indices stay valid)
            branchidx = len(self._names)
            self._names.append(b)
            self._namesreverse[b] = branchidx
        reponode = changelog.node(rev)
        if close:
            branchidx |= _rbccloseflag
        rbcrevidx = rev * _rbcrecsize
        # pack the record and splice it into the array at the rev's slot
        rec = array('c')
        rec.fromstring(pack(_rbcrecfmt, reponode, branchidx))
        self._rbcrevs[rbcrevidx:rbcrevidx + _rbcrecsize] = rec
        return b, close

    def write(self, repo):
        """Save branch cache if it is dirty.

        Appends new branch names to rbc-names and new records to rbc-revs.
        If either on-disk file no longer matches what was read at load time,
        it is rewritten (names) or truncated (revs) before appending.
        Write failures are logged at debug level and otherwise ignored.
        """
        # names are dirty when more exist in memory than were on disk
        if self._rbcnamescount < len(self._names):
            try:
                if self._rbcnamescount != 0:
                    f = repo.vfs.open(_rbcnames, 'ab')
                    # The position after open(x, 'a') is implementation defined-
                    # see issue3543.  SEEK_END was added in 2.5
                    f.seek(0, 2) #os.SEEK_END
                    if f.tell() == self._rbcsnameslen:
                        # file is unchanged since load: safe to append; write
                        # the separator before the new names
                        f.write('\0')
                    else:
                        # someone else modified rbc-names: start over
                        f.close()
                        repo.ui.debug("%s changed - rewriting it\n" % _rbcnames)
                        self._rbcnamescount = 0
                        self._rbcrevslen = 0
                if self._rbcnamescount == 0:
                    # full rewrite of the name list
                    f = repo.vfs.open(_rbcnames, 'wb')
                f.write('\0'.join(encoding.fromlocal(b)
                                  for b in self._names[self._rbcnamescount:]))
                self._rbcsnameslen = f.tell()
                f.close()
            except (IOError, OSError, util.Abort), inst:
                repo.ui.debug("couldn't write revision branch cache names: "
                              "%s\n" % inst)
                # records index into the name list; without names on disk
                # there is no point writing records either
                return
            self._rbcnamescount = len(self._names)

        # write the new revs
        start = self._rbcrevslen * _rbcrecsize
        if start != len(self._rbcrevs):
            # only records backed by actual changelog revisions are written
            revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize)
            try:
                f = repo.vfs.open(_rbcrevs, 'ab')
                # The position after open(x, 'a') is implementation defined-
                # see issue3543.  SEEK_END was added in 2.5
                f.seek(0, 2) #os.SEEK_END
                if f.tell() != start:
                    # on-disk file diverged from the known-good prefix:
                    # drop everything past it before appending
                    repo.ui.debug("truncating %s to %s\n" % (_rbcrevs, start))
                    f.seek(start)
                    f.truncate()
                end = revs * _rbcrecsize
                f.write(self._rbcrevs[start:end])
                f.close()
            except (IOError, OSError, util.Abort), inst:
                repo.ui.debug("couldn't write revision branch cache: %s\n" %
                              inst)
                return
            self._rbcrevslen = revs
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
1 NO CONTENT: modified file
NO CONTENT: modified file
The requested commit or file is too big and content was truncated. Show full diff
General Comments 0
You need to be logged in to leave comments. Login now