##// END OF EJS Templates
global: use raw strings for regular expressions with escapes...
Gregory Szorc -
r41673:bd3f03d8 default
parent child Browse files
Show More
@@ -1,197 +1,197
1 # blackbox.py - log repository events to a file for post-mortem debugging
1 # blackbox.py - log repository events to a file for post-mortem debugging
2 #
2 #
3 # Copyright 2010 Nicolas Dumazet
3 # Copyright 2010 Nicolas Dumazet
4 # Copyright 2013 Facebook, Inc.
4 # Copyright 2013 Facebook, Inc.
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 """log repository events to a blackbox for debugging
9 """log repository events to a blackbox for debugging
10
10
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
11 Logs event information to .hg/blackbox.log to help debug and diagnose problems.
12 The events that get logged can be configured via the blackbox.track config key.
12 The events that get logged can be configured via the blackbox.track config key.
13
13
14 Examples::
14 Examples::
15
15
16 [blackbox]
16 [blackbox]
17 track = *
17 track = *
18 # dirty is *EXPENSIVE* (slow);
18 # dirty is *EXPENSIVE* (slow);
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
19 # each log entry indicates `+` if the repository is dirty, like :hg:`id`.
20 dirty = True
20 dirty = True
21 # record the source of log messages
21 # record the source of log messages
22 logsource = True
22 logsource = True
23
23
24 [blackbox]
24 [blackbox]
25 track = command, commandfinish, commandexception, exthook, pythonhook
25 track = command, commandfinish, commandexception, exthook, pythonhook
26
26
27 [blackbox]
27 [blackbox]
28 track = incoming
28 track = incoming
29
29
30 [blackbox]
30 [blackbox]
31 # limit the size of a log file
31 # limit the size of a log file
32 maxsize = 1.5 MB
32 maxsize = 1.5 MB
33 # rotate up to N log files when the current one gets too big
33 # rotate up to N log files when the current one gets too big
34 maxfiles = 3
34 maxfiles = 3
35
35
36 [blackbox]
36 [blackbox]
37 # Include nanoseconds in log entries with %f (see Python function
37 # Include nanoseconds in log entries with %f (see Python function
38 # datetime.datetime.strftime)
38 # datetime.datetime.strftime)
39 date-format = '%Y-%m-%d @ %H:%M:%S.%f'
39 date-format = '%Y-%m-%d @ %H:%M:%S.%f'
40
40
41 """
41 """
42
42
43 from __future__ import absolute_import
43 from __future__ import absolute_import
44
44
45 import re
45 import re
46
46
47 from mercurial.i18n import _
47 from mercurial.i18n import _
48 from mercurial.node import hex
48 from mercurial.node import hex
49
49
50 from mercurial import (
50 from mercurial import (
51 encoding,
51 encoding,
52 loggingutil,
52 loggingutil,
53 registrar,
53 registrar,
54 )
54 )
55 from mercurial.utils import (
55 from mercurial.utils import (
56 dateutil,
56 dateutil,
57 procutil,
57 procutil,
58 )
58 )
59
59
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core'
# for extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
65
65
cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# Declare every blackbox config knob with its default so that
# ui.config*() lookups below are validated and documented centrally.
configitem('blackbox', 'dirty', default=False)
configitem('blackbox', 'maxsize', default='1 MB')
configitem('blackbox', 'logsource', default=False)
configitem('blackbox', 'maxfiles', default=7)
configitem('blackbox', 'track', default=lambda: ['*'])
configitem('blackbox', 'date-format', default='%Y/%m/%d %H:%M:%S')

# Shared proxy holding the most recently active blackboxlogger, so events
# logged without a repository-bound ui still have somewhere to go.
_lastlogger = loggingutil.proxylogger()
92
92
class blackboxlogger(object):
    """Append repository events to .hg/blackbox.log.

    One instance is created per local repository (see reposetup); the set
    of events actually recorded is driven by the blackbox.track config.
    """

    def __init__(self, ui, repo):
        self._repo = repo
        self._trackedevents = set(ui.configlist('blackbox', 'track'))
        self._maxfiles = ui.configint('blackbox', 'maxfiles')
        self._maxsize = ui.configbytes('blackbox', 'maxsize')
        # re-entrancy guard for log() (see comment there)
        self._inlog = False

    def tracked(self, event):
        # a literal '*' in blackbox.track means "record every event"
        return b'*' in self._trackedevents or event in self._trackedevents

    def log(self, ui, event, msg, opts):
        # self._log() -> ctx.dirty() may create a new subrepo instance whose
        # ui derives from baseui; ui.log()'s own recursion guard is local to
        # each ui instance, so it cannot protect us here - keep our own flag.
        if self._inlog:
            return
        self._inlog = True
        try:
            self._log(ui, event, msg, opts)
        finally:
            self._inlog = False

    def _log(self, ui, event, msg, opts):
        default = ui.configdate('devel', 'default-date')
        date = dateutil.datestr(default, ui.config('blackbox', 'date-format'))
        user = procutil.getuser()
        pid = '%d' % procutil.getpid()
        ctx = self._repo[None]
        rev = '+'.join([hex(p.node()) for p in ctx.parents()])
        changed = ''
        # dirty-checking is expensive; only do it when explicitly enabled
        if (ui.configbool('blackbox', 'dirty') and
            ctx.dirty(missing=True, merge=False, branch=False)):
            changed = '+'
        if ui.configbool('blackbox', 'logsource'):
            src = ' [%s]' % event
        else:
            src = ''
        try:
            fmt = '%s %s @%s%s (%s)%s> %s'
            args = (date, user, rev, changed, pid, src, msg)
            with loggingutil.openlogfile(
                    ui, self._repo.vfs, name='blackbox.log',
                    maxfiles=self._maxfiles, maxsize=self._maxsize) as fp:
                fp.write(fmt % args)
        except (IOError, OSError) as err:
            # deactivate all tracking so a broken log file does not make
            # every subsequent event fail the same way
            self._trackedevents.clear()
            ui.debug('warning: cannot write to blackbox.log: %s\n' %
                     encoding.strtolocal(err.strerror))
            return
        _lastlogger.logger = self
146
146
def uipopulate(ui):
    """Attach the shared fallback logger to each newly populated ui."""
    ui.setlogger(b'blackbox', _lastlogger)
149
149
def reposetup(ui, repo):
    """Install a per-repository blackbox logger on local repositories."""
    # During 'hg pull' a httppeer repo is created to represent the remote
    # repo; it has no .hg directory to hold a blackbox.log, so skip it.
    if not repo.local():
        return

    # blackbox.log lives inside the repo directory, hence one logger per
    # repository rather than a single shared instance.
    logger = blackboxlogger(ui, repo)
    ui.setlogger(b'blackbox', logger)

    # Record a fallback even if ui.log is never called, so blackbox always
    # has somewhere to write.
    if _lastlogger.logger is None:
        _lastlogger.logger = logger

    repo._wlockfreeprefix.add('blackbox.log')
168
168
@command('blackbox',
    [('l', 'limit', 10, _('the number of events to show')),
    ],
    _('hg blackbox [OPTION]...'),
    helpcategory=command.CATEGORY_MAINTENANCE,
    helpbasic=True)
def blackbox(ui, repo, *revs, **opts):
    '''view the recent repository events
    '''

    if not repo.vfs.exists('blackbox.log'):
        return

    limit = opts.get(r'limit')
    # fix: close the log file deterministically instead of leaking the
    # handle (the original opened it and never closed it)
    with repo.vfs('blackbox.log', 'r') as fp:
        lines = fp.read().split('\n')

    # entry heads look like: 2013/01/23 19:13:36 root>
    # compile once instead of re-matching the pattern per line
    entryre = re.compile(br'^\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} .*> .*')

    # walk backwards so we emit only the most recent `limit` entries,
    # keeping any continuation lines attached to their entry
    count = 0
    output = []
    for line in reversed(lines):
        if count >= limit:
            break

        if entryre.match(line):
            count += 1
        output.append(line)

    ui.status('\n'.join(reversed(output)))
@@ -1,72 +1,72
1 # commitextras.py
1 # commitextras.py
2 #
2 #
3 # Copyright 2013 Facebook, Inc.
3 # Copyright 2013 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''adds a new flag extras to commit (ADVANCED)'''
8 '''adds a new flag extras to commit (ADVANCED)'''
9
9
10 from __future__ import absolute_import
10 from __future__ import absolute_import
11
11
12 import re
12 import re
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 commands,
16 commands,
17 error,
17 error,
18 extensions,
18 extensions,
19 registrar,
19 registrar,
20 util,
20 util,
21 )
21 )
22
22
cmdtable = {}
command = registrar.command(cmdtable)
testedwith = 'ships-with-hg-core'

# extras keys that Mercurial itself writes; users must not set these
# manually via --extra (see the check in _commit)
usedinternally = {
    'amend_source',
    'branch',
    'close',
    'histedit_source',
    'topic',
    'rebase_source',
    'intermediate-source',
    '__touch-noise__',
    'source',
    'transplant_source',
}
38 }
39
39
def extsetup(ui):
    """Wrap 'hg commit' so it grows an --extra KEY=VALUE option."""
    entry = extensions.wrapcommand(commands.table, 'commit', _commit)
    entry[1].append(('', 'extra', [],
                     _('set a changeset\'s extra values'), _("KEY=VALUE")))
44 _('set a changeset\'s extra values'), _("KEY=VALUE")))
45
45
def _commit(orig, ui, repo, *pats, **opts):
    """Wrapped commit: validate --extra KEY=VALUE pairs and inject them
    into the changeset's extras dict before the real commit runs.
    """
    if util.safehasattr(repo, 'unfiltered'):
        repo = repo.unfiltered()

    class repoextra(repo.__class__):
        def commit(self, *innerpats, **inneropts):
            extras = opts.get(r'extra')
            for raw in extras:
                # each entry must be KEY=VALUE with a non-empty key made of
                # word characters or '-', and must not shadow internal keys
                if '=' not in raw:
                    msg = _("unable to parse '%s', should follow "
                            "KEY=VALUE format")
                    raise error.Abort(msg % raw)
                k, v = raw.split('=', 1)
                if not k:
                    msg = _("unable to parse '%s', keys can't be empty")
                    raise error.Abort(msg % raw)
                if re.search(br'[^\w-]', k):
                    msg = _("keys can only contain ascii letters, digits,"
                            " '_' and '-'")
                    raise error.Abort(msg)
                if k in usedinternally:
                    msg = _("key '%s' is used internally, can't be set "
                            "manually")
                    raise error.Abort(msg % k)
                inneropts[r'extra'][k] = v
            return super(repoextra, self).commit(*innerpats, **inneropts)

    repo.__class__ = repoextra
    return orig(ui, repo, *pats, **opts)
72 return orig(ui, repo, *pats, **opts)
@@ -1,965 +1,965
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import functools
9 import functools
10 import os
10 import os
11 import re
11 import re
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial import (
14 from mercurial import (
15 encoding,
15 encoding,
16 error,
16 error,
17 hook,
17 hook,
18 pycompat,
18 pycompat,
19 util,
19 util,
20 )
20 )
21 from mercurial.utils import (
21 from mercurial.utils import (
22 dateutil,
22 dateutil,
23 procutil,
23 procutil,
24 stringutil,
24 stringutil,
25 )
25 )
26
26
# use mercurial's py2/py3-compatible pickle alias
pickle = util.pickle
28
28
class logentry(object):
    '''One CVS (r)log record. Instances carry the following attributes:
    .author - author name as CVS knows it
    .branch - name of branch this revision is on
    .branches - revision tuple of branches starting at this revision
    .comment - commit message
    .commitid - CVS commitid or None
    .date - the commit date as a (time, tz) tuple
    .dead - true if file revision is dead
    .file - Name of file
    .lines - a tuple (+lines, -lines) or None
    .parent - Previous revision of this entry
    .rcs - name of file as returned from CVS
    .revision - revision number as tuple
    .tags - list of tags on the file
    .synthetic - is this a synthetic "file ... added on ..." revision?
    .mergepoint - the branch that has been merged from (if present in
                  rlog output) or None
    .branchpoints - the branches that start at the current entry or empty
    '''
    def __init__(self, **entries):
        # entries start out real; the log parser marks fake
        # "file ... added on branch" records as synthetic later
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        pairs = [r"%s=%r" % (name, self.__dict__[name])
                 for name in sorted(self.__dict__)]
        return r"%s(%s)" % (type(self).__name__, r", ".join(pairs))
56
56
class logerror(Exception):
    """Raised when CVS (r)log output reports an error condition."""
    pass
59
59
def getrepopath(cvspath):
    """Return the repository path from a CVS path.

    >>> getrepopath(b'/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'user@server/path/to/repository')
    '/path/to/repository'
    """
    # Per the CVS manual, CVS paths look like:
    # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
    #
    # Only the part after the last ':' can contain the repository path:
    # within it, skip past any 'user@host' prefix and take everything from
    # the first '/' that follows (inclusive).
    tail = cvspath.split(':')[-1]
    atposition = tail.find('@')
    # find() returns -1 when there is no '@'; search from the start then
    start = max(atposition, 0)
    return tail[tail.find('/', start):]
98
98
99 def createlog(ui, directory=None, root="", rlog=True, cache=None):
99 def createlog(ui, directory=None, root="", rlog=True, cache=None):
100 '''Collect the CVS rlog'''
100 '''Collect the CVS rlog'''
101
101
102 # Because we store many duplicate commit log messages, reusing strings
102 # Because we store many duplicate commit log messages, reusing strings
103 # saves a lot of memory and pickle storage space.
103 # saves a lot of memory and pickle storage space.
104 _scache = {}
104 _scache = {}
105 def scache(s):
105 def scache(s):
106 "return a shared version of a string"
106 "return a shared version of a string"
107 return _scache.setdefault(s, s)
107 return _scache.setdefault(s, s)
108
108
109 ui.status(_('collecting CVS rlog\n'))
109 ui.status(_('collecting CVS rlog\n'))
110
110
111 log = [] # list of logentry objects containing the CVS state
111 log = [] # list of logentry objects containing the CVS state
112
112
113 # patterns to match in CVS (r)log output, by state of use
113 # patterns to match in CVS (r)log output, by state of use
114 re_00 = re.compile(b'RCS file: (.+)$')
114 re_00 = re.compile(b'RCS file: (.+)$')
115 re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
115 re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
116 re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
116 re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
117 re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
117 re_03 = re.compile(b"(Cannot access.+CVSROOT)|"
118 b"(can't create temporary directory.+)$")
118 b"(can't create temporary directory.+)$")
119 re_10 = re.compile(b'Working file: (.+)$')
119 re_10 = re.compile(b'Working file: (.+)$')
120 re_20 = re.compile(b'symbolic names:')
120 re_20 = re.compile(b'symbolic names:')
121 re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
121 re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
122 re_31 = re.compile(b'----------------------------$')
122 re_31 = re.compile(b'----------------------------$')
123 re_32 = re.compile(b'======================================='
123 re_32 = re.compile(b'======================================='
124 b'======================================$')
124 b'======================================$')
125 re_50 = re.compile(b'revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
125 re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
126 re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
126 re_60 = re.compile(br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
127 br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
127 br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
128 br'(\s+commitid:\s+([^;]+);)?'
128 br'(\s+commitid:\s+([^;]+);)?'
129 br'(.*mergepoint:\s+([^;]+);)?')
129 br'(.*mergepoint:\s+([^;]+);)?')
130 re_70 = re.compile(b'branches: (.+);$')
130 re_70 = re.compile(b'branches: (.+);$')
131
131
132 file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
132 file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')
133
133
134 prefix = '' # leading path to strip of what we get from CVS
134 prefix = '' # leading path to strip of what we get from CVS
135
135
136 if directory is None:
136 if directory is None:
137 # Current working directory
137 # Current working directory
138
138
139 # Get the real directory in the repository
139 # Get the real directory in the repository
140 try:
140 try:
141 prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
141 prefix = open(os.path.join('CVS','Repository'), 'rb').read().strip()
142 directory = prefix
142 directory = prefix
143 if prefix == ".":
143 if prefix == ".":
144 prefix = ""
144 prefix = ""
145 except IOError:
145 except IOError:
146 raise logerror(_('not a CVS sandbox'))
146 raise logerror(_('not a CVS sandbox'))
147
147
148 if prefix and not prefix.endswith(pycompat.ossep):
148 if prefix and not prefix.endswith(pycompat.ossep):
149 prefix += pycompat.ossep
149 prefix += pycompat.ossep
150
150
151 # Use the Root file in the sandbox, if it exists
151 # Use the Root file in the sandbox, if it exists
152 try:
152 try:
153 root = open(os.path.join('CVS','Root'), 'rb').read().strip()
153 root = open(os.path.join('CVS','Root'), 'rb').read().strip()
154 except IOError:
154 except IOError:
155 pass
155 pass
156
156
157 if not root:
157 if not root:
158 root = encoding.environ.get('CVSROOT', '')
158 root = encoding.environ.get('CVSROOT', '')
159
159
160 # read log cache if one exists
160 # read log cache if one exists
161 oldlog = []
161 oldlog = []
162 date = None
162 date = None
163
163
164 if cache:
164 if cache:
165 cachedir = os.path.expanduser('~/.hg.cvsps')
165 cachedir = os.path.expanduser('~/.hg.cvsps')
166 if not os.path.exists(cachedir):
166 if not os.path.exists(cachedir):
167 os.mkdir(cachedir)
167 os.mkdir(cachedir)
168
168
169 # The cvsps cache pickle needs a uniquified name, based on the
169 # The cvsps cache pickle needs a uniquified name, based on the
170 # repository location. The address may have all sort of nasties
170 # repository location. The address may have all sort of nasties
171 # in it, slashes, colons and such. So here we take just the
171 # in it, slashes, colons and such. So here we take just the
172 # alphanumeric characters, concatenated in a way that does not
172 # alphanumeric characters, concatenated in a way that does not
173 # mix up the various components, so that
173 # mix up the various components, so that
174 # :pserver:user@server:/path
174 # :pserver:user@server:/path
175 # and
175 # and
176 # /pserver/user/server/path
176 # /pserver/user/server/path
177 # are mapped to different cache file names.
177 # are mapped to different cache file names.
178 cachefile = root.split(":") + [directory, "cache"]
178 cachefile = root.split(":") + [directory, "cache"]
179 cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
179 cachefile = ['-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
180 cachefile = os.path.join(cachedir,
180 cachefile = os.path.join(cachedir,
181 '.'.join([s for s in cachefile if s]))
181 '.'.join([s for s in cachefile if s]))
182
182
183 if cache == 'update':
183 if cache == 'update':
184 try:
184 try:
185 ui.note(_('reading cvs log cache %s\n') % cachefile)
185 ui.note(_('reading cvs log cache %s\n') % cachefile)
186 oldlog = pickle.load(open(cachefile, 'rb'))
186 oldlog = pickle.load(open(cachefile, 'rb'))
187 for e in oldlog:
187 for e in oldlog:
188 if not (util.safehasattr(e, 'branchpoints') and
188 if not (util.safehasattr(e, 'branchpoints') and
189 util.safehasattr(e, 'commitid') and
189 util.safehasattr(e, 'commitid') and
190 util.safehasattr(e, 'mergepoint')):
190 util.safehasattr(e, 'mergepoint')):
191 ui.status(_('ignoring old cache\n'))
191 ui.status(_('ignoring old cache\n'))
192 oldlog = []
192 oldlog = []
193 break
193 break
194
194
195 ui.note(_('cache has %d log entries\n') % len(oldlog))
195 ui.note(_('cache has %d log entries\n') % len(oldlog))
196 except Exception as e:
196 except Exception as e:
197 ui.note(_('error reading cache: %r\n') % e)
197 ui.note(_('error reading cache: %r\n') % e)
198
198
199 if oldlog:
199 if oldlog:
200 date = oldlog[-1].date # last commit date as a (time,tz) tuple
200 date = oldlog[-1].date # last commit date as a (time,tz) tuple
201 date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
201 date = dateutil.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
202
202
203 # build the CVS commandline
203 # build the CVS commandline
204 cmd = ['cvs', '-q']
204 cmd = ['cvs', '-q']
205 if root:
205 if root:
206 cmd.append('-d%s' % root)
206 cmd.append('-d%s' % root)
207 p = util.normpath(getrepopath(root))
207 p = util.normpath(getrepopath(root))
208 if not p.endswith('/'):
208 if not p.endswith('/'):
209 p += '/'
209 p += '/'
210 if prefix:
210 if prefix:
211 # looks like normpath replaces "" by "."
211 # looks like normpath replaces "" by "."
212 prefix = p + util.normpath(prefix)
212 prefix = p + util.normpath(prefix)
213 else:
213 else:
214 prefix = p
214 prefix = p
215 cmd.append(['log', 'rlog'][rlog])
215 cmd.append(['log', 'rlog'][rlog])
216 if date:
216 if date:
217 # no space between option and date string
217 # no space between option and date string
218 cmd.append('-d>%s' % date)
218 cmd.append('-d>%s' % date)
219 cmd.append(directory)
219 cmd.append(directory)
220
220
221 # state machine begins here
221 # state machine begins here
222 tags = {} # dictionary of revisions on current file with their tags
222 tags = {} # dictionary of revisions on current file with their tags
223 branchmap = {} # mapping between branch names and revision numbers
223 branchmap = {} # mapping between branch names and revision numbers
224 rcsmap = {}
224 rcsmap = {}
225 state = 0
225 state = 0
226 store = False # set when a new record can be appended
226 store = False # set when a new record can be appended
227
227
228 cmd = [procutil.shellquote(arg) for arg in cmd]
228 cmd = [procutil.shellquote(arg) for arg in cmd]
229 ui.note(_("running %s\n") % (' '.join(cmd)))
229 ui.note(_("running %s\n") % (' '.join(cmd)))
230 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
230 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
231
231
232 pfp = procutil.popen(' '.join(cmd), 'rb')
232 pfp = procutil.popen(' '.join(cmd), 'rb')
233 peek = util.fromnativeeol(pfp.readline())
233 peek = util.fromnativeeol(pfp.readline())
234 while True:
234 while True:
235 line = peek
235 line = peek
236 if line == '':
236 if line == '':
237 break
237 break
238 peek = util.fromnativeeol(pfp.readline())
238 peek = util.fromnativeeol(pfp.readline())
239 if line.endswith('\n'):
239 if line.endswith('\n'):
240 line = line[:-1]
240 line = line[:-1]
241 #ui.debug('state=%d line=%r\n' % (state, line))
241 #ui.debug('state=%d line=%r\n' % (state, line))
242
242
243 if state == 0:
243 if state == 0:
244 # initial state, consume input until we see 'RCS file'
244 # initial state, consume input until we see 'RCS file'
245 match = re_00.match(line)
245 match = re_00.match(line)
246 if match:
246 if match:
247 rcs = match.group(1)
247 rcs = match.group(1)
248 tags = {}
248 tags = {}
249 if rlog:
249 if rlog:
250 filename = util.normpath(rcs[:-2])
250 filename = util.normpath(rcs[:-2])
251 if filename.startswith(prefix):
251 if filename.startswith(prefix):
252 filename = filename[len(prefix):]
252 filename = filename[len(prefix):]
253 if filename.startswith('/'):
253 if filename.startswith('/'):
254 filename = filename[1:]
254 filename = filename[1:]
255 if filename.startswith('Attic/'):
255 if filename.startswith('Attic/'):
256 filename = filename[6:]
256 filename = filename[6:]
257 else:
257 else:
258 filename = filename.replace('/Attic/', '/')
258 filename = filename.replace('/Attic/', '/')
259 state = 2
259 state = 2
260 continue
260 continue
261 state = 1
261 state = 1
262 continue
262 continue
263 match = re_01.match(line)
263 match = re_01.match(line)
264 if match:
264 if match:
265 raise logerror(match.group(1))
265 raise logerror(match.group(1))
266 match = re_02.match(line)
266 match = re_02.match(line)
267 if match:
267 if match:
268 raise logerror(match.group(2))
268 raise logerror(match.group(2))
269 if re_03.match(line):
269 if re_03.match(line):
270 raise logerror(line)
270 raise logerror(line)
271
271
272 elif state == 1:
272 elif state == 1:
273 # expect 'Working file' (only when using log instead of rlog)
273 # expect 'Working file' (only when using log instead of rlog)
274 match = re_10.match(line)
274 match = re_10.match(line)
275 assert match, _('RCS file must be followed by working file')
275 assert match, _('RCS file must be followed by working file')
276 filename = util.normpath(match.group(1))
276 filename = util.normpath(match.group(1))
277 state = 2
277 state = 2
278
278
279 elif state == 2:
279 elif state == 2:
280 # expect 'symbolic names'
280 # expect 'symbolic names'
281 if re_20.match(line):
281 if re_20.match(line):
282 branchmap = {}
282 branchmap = {}
283 state = 3
283 state = 3
284
284
285 elif state == 3:
285 elif state == 3:
286 # read the symbolic names and store as tags
286 # read the symbolic names and store as tags
287 match = re_30.match(line)
287 match = re_30.match(line)
288 if match:
288 if match:
289 rev = [int(x) for x in match.group(2).split('.')]
289 rev = [int(x) for x in match.group(2).split('.')]
290
290
291 # Convert magic branch number to an odd-numbered one
291 # Convert magic branch number to an odd-numbered one
292 revn = len(rev)
292 revn = len(rev)
293 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
293 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
294 rev = rev[:-2] + rev[-1:]
294 rev = rev[:-2] + rev[-1:]
295 rev = tuple(rev)
295 rev = tuple(rev)
296
296
297 if rev not in tags:
297 if rev not in tags:
298 tags[rev] = []
298 tags[rev] = []
299 tags[rev].append(match.group(1))
299 tags[rev].append(match.group(1))
300 branchmap[match.group(1)] = match.group(2)
300 branchmap[match.group(1)] = match.group(2)
301
301
302 elif re_31.match(line):
302 elif re_31.match(line):
303 state = 5
303 state = 5
304 elif re_32.match(line):
304 elif re_32.match(line):
305 state = 0
305 state = 0
306
306
307 elif state == 4:
307 elif state == 4:
308 # expecting '------' separator before first revision
308 # expecting '------' separator before first revision
309 if re_31.match(line):
309 if re_31.match(line):
310 state = 5
310 state = 5
311 else:
311 else:
312 assert not re_32.match(line), _('must have at least '
312 assert not re_32.match(line), _('must have at least '
313 'some revisions')
313 'some revisions')
314
314
315 elif state == 5:
315 elif state == 5:
316 # expecting revision number and possibly (ignored) lock indication
316 # expecting revision number and possibly (ignored) lock indication
317 # we create the logentry here from values stored in states 0 to 4,
317 # we create the logentry here from values stored in states 0 to 4,
318 # as this state is re-entered for subsequent revisions of a file.
318 # as this state is re-entered for subsequent revisions of a file.
319 match = re_50.match(line)
319 match = re_50.match(line)
320 assert match, _('expected revision number')
320 assert match, _('expected revision number')
321 e = logentry(rcs=scache(rcs),
321 e = logentry(rcs=scache(rcs),
322 file=scache(filename),
322 file=scache(filename),
323 revision=tuple([int(x) for x in
323 revision=tuple([int(x) for x in
324 match.group(1).split('.')]),
324 match.group(1).split('.')]),
325 branches=[],
325 branches=[],
326 parent=None,
326 parent=None,
327 commitid=None,
327 commitid=None,
328 mergepoint=None,
328 mergepoint=None,
329 branchpoints=set())
329 branchpoints=set())
330
330
331 state = 6
331 state = 6
332
332
333 elif state == 6:
333 elif state == 6:
334 # expecting date, author, state, lines changed
334 # expecting date, author, state, lines changed
335 match = re_60.match(line)
335 match = re_60.match(line)
336 assert match, _('revision must be followed by date line')
336 assert match, _('revision must be followed by date line')
337 d = match.group(1)
337 d = match.group(1)
338 if d[2] == '/':
338 if d[2] == '/':
339 # Y2K
339 # Y2K
340 d = '19' + d
340 d = '19' + d
341
341
342 if len(d.split()) != 3:
342 if len(d.split()) != 3:
343 # cvs log dates always in GMT
343 # cvs log dates always in GMT
344 d = d + ' UTC'
344 d = d + ' UTC'
345 e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
345 e.date = dateutil.parsedate(d, ['%y/%m/%d %H:%M:%S',
346 '%Y/%m/%d %H:%M:%S',
346 '%Y/%m/%d %H:%M:%S',
347 '%Y-%m-%d %H:%M:%S'])
347 '%Y-%m-%d %H:%M:%S'])
348 e.author = scache(match.group(2))
348 e.author = scache(match.group(2))
349 e.dead = match.group(3).lower() == 'dead'
349 e.dead = match.group(3).lower() == 'dead'
350
350
351 if match.group(5):
351 if match.group(5):
352 if match.group(6):
352 if match.group(6):
353 e.lines = (int(match.group(5)), int(match.group(6)))
353 e.lines = (int(match.group(5)), int(match.group(6)))
354 else:
354 else:
355 e.lines = (int(match.group(5)), 0)
355 e.lines = (int(match.group(5)), 0)
356 elif match.group(6):
356 elif match.group(6):
357 e.lines = (0, int(match.group(6)))
357 e.lines = (0, int(match.group(6)))
358 else:
358 else:
359 e.lines = None
359 e.lines = None
360
360
361 if match.group(7): # cvs 1.12 commitid
361 if match.group(7): # cvs 1.12 commitid
362 e.commitid = match.group(8)
362 e.commitid = match.group(8)
363
363
364 if match.group(9): # cvsnt mergepoint
364 if match.group(9): # cvsnt mergepoint
365 myrev = match.group(10).split('.')
365 myrev = match.group(10).split('.')
366 if len(myrev) == 2: # head
366 if len(myrev) == 2: # head
367 e.mergepoint = 'HEAD'
367 e.mergepoint = 'HEAD'
368 else:
368 else:
369 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
369 myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
370 branches = [b for b in branchmap if branchmap[b] == myrev]
370 branches = [b for b in branchmap if branchmap[b] == myrev]
371 assert len(branches) == 1, ('unknown branch: %s'
371 assert len(branches) == 1, ('unknown branch: %s'
372 % e.mergepoint)
372 % e.mergepoint)
373 e.mergepoint = branches[0]
373 e.mergepoint = branches[0]
374
374
375 e.comment = []
375 e.comment = []
376 state = 7
376 state = 7
377
377
378 elif state == 7:
378 elif state == 7:
379 # read the revision numbers of branches that start at this revision
379 # read the revision numbers of branches that start at this revision
380 # or store the commit log message otherwise
380 # or store the commit log message otherwise
381 m = re_70.match(line)
381 m = re_70.match(line)
382 if m:
382 if m:
383 e.branches = [tuple([int(y) for y in x.strip().split('.')])
383 e.branches = [tuple([int(y) for y in x.strip().split('.')])
384 for x in m.group(1).split(';')]
384 for x in m.group(1).split(';')]
385 state = 8
385 state = 8
386 elif re_31.match(line) and re_50.match(peek):
386 elif re_31.match(line) and re_50.match(peek):
387 state = 5
387 state = 5
388 store = True
388 store = True
389 elif re_32.match(line):
389 elif re_32.match(line):
390 state = 0
390 state = 0
391 store = True
391 store = True
392 else:
392 else:
393 e.comment.append(line)
393 e.comment.append(line)
394
394
395 elif state == 8:
395 elif state == 8:
396 # store commit log message
396 # store commit log message
397 if re_31.match(line):
397 if re_31.match(line):
398 cpeek = peek
398 cpeek = peek
399 if cpeek.endswith('\n'):
399 if cpeek.endswith('\n'):
400 cpeek = cpeek[:-1]
400 cpeek = cpeek[:-1]
401 if re_50.match(cpeek):
401 if re_50.match(cpeek):
402 state = 5
402 state = 5
403 store = True
403 store = True
404 else:
404 else:
405 e.comment.append(line)
405 e.comment.append(line)
406 elif re_32.match(line):
406 elif re_32.match(line):
407 state = 0
407 state = 0
408 store = True
408 store = True
409 else:
409 else:
410 e.comment.append(line)
410 e.comment.append(line)
411
411
412 # When a file is added on a branch B1, CVS creates a synthetic
412 # When a file is added on a branch B1, CVS creates a synthetic
413 # dead trunk revision 1.1 so that the branch has a root.
413 # dead trunk revision 1.1 so that the branch has a root.
414 # Likewise, if you merge such a file to a later branch B2 (one
414 # Likewise, if you merge such a file to a later branch B2 (one
415 # that already existed when the file was added on B1), CVS
415 # that already existed when the file was added on B1), CVS
416 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
416 # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
417 # these revisions now, but mark them synthetic so
417 # these revisions now, but mark them synthetic so
418 # createchangeset() can take care of them.
418 # createchangeset() can take care of them.
419 if (store and
419 if (store and
420 e.dead and
420 e.dead and
421 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
421 e.revision[-1] == 1 and # 1.1 or 1.1.x.1
422 len(e.comment) == 1 and
422 len(e.comment) == 1 and
423 file_added_re.match(e.comment[0])):
423 file_added_re.match(e.comment[0])):
424 ui.debug('found synthetic revision in %s: %r\n'
424 ui.debug('found synthetic revision in %s: %r\n'
425 % (e.rcs, e.comment[0]))
425 % (e.rcs, e.comment[0]))
426 e.synthetic = True
426 e.synthetic = True
427
427
428 if store:
428 if store:
429 # clean up the results and save in the log.
429 # clean up the results and save in the log.
430 store = False
430 store = False
431 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
431 e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
432 e.comment = scache('\n'.join(e.comment))
432 e.comment = scache('\n'.join(e.comment))
433
433
434 revn = len(e.revision)
434 revn = len(e.revision)
435 if revn > 3 and (revn % 2) == 0:
435 if revn > 3 and (revn % 2) == 0:
436 e.branch = tags.get(e.revision[:-1], [None])[0]
436 e.branch = tags.get(e.revision[:-1], [None])[0]
437 else:
437 else:
438 e.branch = None
438 e.branch = None
439
439
440 # find the branches starting from this revision
440 # find the branches starting from this revision
441 branchpoints = set()
441 branchpoints = set()
442 for branch, revision in branchmap.iteritems():
442 for branch, revision in branchmap.iteritems():
443 revparts = tuple([int(i) for i in revision.split('.')])
443 revparts = tuple([int(i) for i in revision.split('.')])
444 if len(revparts) < 2: # bad tags
444 if len(revparts) < 2: # bad tags
445 continue
445 continue
446 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
446 if revparts[-2] == 0 and revparts[-1] % 2 == 0:
447 # normal branch
447 # normal branch
448 if revparts[:-2] == e.revision:
448 if revparts[:-2] == e.revision:
449 branchpoints.add(branch)
449 branchpoints.add(branch)
450 elif revparts == (1, 1, 1): # vendor branch
450 elif revparts == (1, 1, 1): # vendor branch
451 if revparts in e.branches:
451 if revparts in e.branches:
452 branchpoints.add(branch)
452 branchpoints.add(branch)
453 e.branchpoints = branchpoints
453 e.branchpoints = branchpoints
454
454
455 log.append(e)
455 log.append(e)
456
456
457 rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
457 rcsmap[e.rcs.replace('/Attic/', '/')] = e.rcs
458
458
459 if len(log) % 100 == 0:
459 if len(log) % 100 == 0:
460 ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
460 ui.status(stringutil.ellipsis('%d %s' % (len(log), e.file), 80)
461 + '\n')
461 + '\n')
462
462
463 log.sort(key=lambda x: (x.rcs, x.revision))
463 log.sort(key=lambda x: (x.rcs, x.revision))
464
464
465 # find parent revisions of individual files
465 # find parent revisions of individual files
466 versions = {}
466 versions = {}
467 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
467 for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
468 rcs = e.rcs.replace('/Attic/', '/')
468 rcs = e.rcs.replace('/Attic/', '/')
469 if rcs in rcsmap:
469 if rcs in rcsmap:
470 e.rcs = rcsmap[rcs]
470 e.rcs = rcsmap[rcs]
471 branch = e.revision[:-1]
471 branch = e.revision[:-1]
472 versions[(e.rcs, branch)] = e.revision
472 versions[(e.rcs, branch)] = e.revision
473
473
474 for e in log:
474 for e in log:
475 branch = e.revision[:-1]
475 branch = e.revision[:-1]
476 p = versions.get((e.rcs, branch), None)
476 p = versions.get((e.rcs, branch), None)
477 if p is None:
477 if p is None:
478 p = e.revision[:-2]
478 p = e.revision[:-2]
479 e.parent = p
479 e.parent = p
480 versions[(e.rcs, branch)] = e.revision
480 versions[(e.rcs, branch)] = e.revision
481
481
482 # update the log cache
482 # update the log cache
483 if cache:
483 if cache:
484 if log:
484 if log:
485 # join up the old and new logs
485 # join up the old and new logs
486 log.sort(key=lambda x: x.date)
486 log.sort(key=lambda x: x.date)
487
487
488 if oldlog and oldlog[-1].date >= log[0].date:
488 if oldlog and oldlog[-1].date >= log[0].date:
489 raise logerror(_('log cache overlaps with new log entries,'
489 raise logerror(_('log cache overlaps with new log entries,'
490 ' re-run without cache.'))
490 ' re-run without cache.'))
491
491
492 log = oldlog + log
492 log = oldlog + log
493
493
494 # write the new cachefile
494 # write the new cachefile
495 ui.note(_('writing cvs log cache %s\n') % cachefile)
495 ui.note(_('writing cvs log cache %s\n') % cachefile)
496 pickle.dump(log, open(cachefile, 'wb'))
496 pickle.dump(log, open(cachefile, 'wb'))
497 else:
497 else:
498 log = oldlog
498 log = oldlog
499
499
500 ui.status(_('%d log entries\n') % len(log))
500 ui.status(_('%d log entries\n') % len(log))
501
501
502 encodings = ui.configlist('convert', 'cvsps.logencoding')
502 encodings = ui.configlist('convert', 'cvsps.logencoding')
503 if encodings:
503 if encodings:
504 def revstr(r):
504 def revstr(r):
505 # this is needed, because logentry.revision is a tuple of "int"
505 # this is needed, because logentry.revision is a tuple of "int"
506 # (e.g. (1, 2) for "1.2")
506 # (e.g. (1, 2) for "1.2")
507 return '.'.join(pycompat.maplist(pycompat.bytestr, r))
507 return '.'.join(pycompat.maplist(pycompat.bytestr, r))
508
508
509 for entry in log:
509 for entry in log:
510 comment = entry.comment
510 comment = entry.comment
511 for e in encodings:
511 for e in encodings:
512 try:
512 try:
513 entry.comment = comment.decode(
513 entry.comment = comment.decode(
514 pycompat.sysstr(e)).encode('utf-8')
514 pycompat.sysstr(e)).encode('utf-8')
515 if ui.debugflag:
515 if ui.debugflag:
516 ui.debug("transcoding by %s: %s of %s\n" %
516 ui.debug("transcoding by %s: %s of %s\n" %
517 (e, revstr(entry.revision), entry.file))
517 (e, revstr(entry.revision), entry.file))
518 break
518 break
519 except UnicodeDecodeError:
519 except UnicodeDecodeError:
520 pass # try next encoding
520 pass # try next encoding
521 except LookupError as inst: # unknown encoding, maybe
521 except LookupError as inst: # unknown encoding, maybe
522 raise error.Abort(inst,
522 raise error.Abort(inst,
523 hint=_('check convert.cvsps.logencoding'
523 hint=_('check convert.cvsps.logencoding'
524 ' configuration'))
524 ' configuration'))
525 else:
525 else:
526 raise error.Abort(_("no encoding can transcode"
526 raise error.Abort(_("no encoding can transcode"
527 " CVS log message for %s of %s")
527 " CVS log message for %s of %s")
528 % (revstr(entry.revision), entry.file),
528 % (revstr(entry.revision), entry.file),
529 hint=_('check convert.cvsps.logencoding'
529 hint=_('check convert.cvsps.logencoding'
530 ' configuration'))
530 ' configuration'))
531
531
532 hook.hook(ui, None, "cvslog", True, log=log)
532 hook.hook(ui, None, "cvslog", True, log=log)
533
533
534 return log
534 return log
535
535
536
536
class changeset(object):
    '''A single converted changeset assembled from CVS log entries.

    Instances carry the following attributes:
    .id - integer identifying this changeset (list index)
    .author - author name as CVS knows it
    .branch - name of branch this changeset is on, or None
    .comment - commit message
    .commitid - CVS commitid or None
    .date - the commit date as a (time,tz) tuple
    .entries - list of logentry objects in this changeset
    .parents - list of one or two parent changesets
    .tags - list of tags on this changeset
    .synthetic - from synthetic revision "file ... added on branch ..."
    .mergepoint- the branch that has been merged from or None
    .branchpoints- the branches that start at the current entry or empty
    '''
    def __init__(self, **entries):
        # Defaults first, so keyword arguments may override them.
        self.id = None
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        # Render attributes in sorted order for stable, readable output.
        keys = sorted(self.__dict__)
        body = ", ".join("%s=%r" % (key, self.__dict__[key]) for key in keys)
        return "%s(%s)" % (type(self).__name__, body)
560
560
561 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
561 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
562 '''Convert log into changesets.'''
562 '''Convert log into changesets.'''
563
563
564 ui.status(_('creating changesets\n'))
564 ui.status(_('creating changesets\n'))
565
565
566 # try to order commitids by date
566 # try to order commitids by date
567 mindate = {}
567 mindate = {}
568 for e in log:
568 for e in log:
569 if e.commitid:
569 if e.commitid:
570 if e.commitid not in mindate:
570 if e.commitid not in mindate:
571 mindate[e.commitid] = e.date
571 mindate[e.commitid] = e.date
572 else:
572 else:
573 mindate[e.commitid] = min(e.date, mindate[e.commitid])
573 mindate[e.commitid] = min(e.date, mindate[e.commitid])
574
574
575 # Merge changesets
575 # Merge changesets
576 log.sort(key=lambda x: (mindate.get(x.commitid, (-1, 0)),
576 log.sort(key=lambda x: (mindate.get(x.commitid, (-1, 0)),
577 x.commitid or '', x.comment,
577 x.commitid or '', x.comment,
578 x.author, x.branch or '', x.date, x.branchpoints))
578 x.author, x.branch or '', x.date, x.branchpoints))
579
579
580 changesets = []
580 changesets = []
581 files = set()
581 files = set()
582 c = None
582 c = None
583 for i, e in enumerate(log):
583 for i, e in enumerate(log):
584
584
585 # Check if log entry belongs to the current changeset or not.
585 # Check if log entry belongs to the current changeset or not.
586
586
587 # Since CVS is file-centric, two different file revisions with
587 # Since CVS is file-centric, two different file revisions with
588 # different branchpoints should be treated as belonging to two
588 # different branchpoints should be treated as belonging to two
589 # different changesets (and the ordering is important and not
589 # different changesets (and the ordering is important and not
590 # honoured by cvsps at this point).
590 # honoured by cvsps at this point).
591 #
591 #
592 # Consider the following case:
592 # Consider the following case:
593 # foo 1.1 branchpoints: [MYBRANCH]
593 # foo 1.1 branchpoints: [MYBRANCH]
594 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
594 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
595 #
595 #
596 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
596 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
597 # later version of foo may be in MYBRANCH2, so foo should be the
597 # later version of foo may be in MYBRANCH2, so foo should be the
598 # first changeset and bar the next and MYBRANCH and MYBRANCH2
598 # first changeset and bar the next and MYBRANCH and MYBRANCH2
599 # should both start off of the bar changeset. No provisions are
599 # should both start off of the bar changeset. No provisions are
600 # made to ensure that this is, in fact, what happens.
600 # made to ensure that this is, in fact, what happens.
601 if not (c and e.branchpoints == c.branchpoints and
601 if not (c and e.branchpoints == c.branchpoints and
602 (# cvs commitids
602 (# cvs commitids
603 (e.commitid is not None and e.commitid == c.commitid) or
603 (e.commitid is not None and e.commitid == c.commitid) or
604 (# no commitids, use fuzzy commit detection
604 (# no commitids, use fuzzy commit detection
605 (e.commitid is None or c.commitid is None) and
605 (e.commitid is None or c.commitid is None) and
606 e.comment == c.comment and
606 e.comment == c.comment and
607 e.author == c.author and
607 e.author == c.author and
608 e.branch == c.branch and
608 e.branch == c.branch and
609 ((c.date[0] + c.date[1]) <=
609 ((c.date[0] + c.date[1]) <=
610 (e.date[0] + e.date[1]) <=
610 (e.date[0] + e.date[1]) <=
611 (c.date[0] + c.date[1]) + fuzz) and
611 (c.date[0] + c.date[1]) + fuzz) and
612 e.file not in files))):
612 e.file not in files))):
613 c = changeset(comment=e.comment, author=e.author,
613 c = changeset(comment=e.comment, author=e.author,
614 branch=e.branch, date=e.date,
614 branch=e.branch, date=e.date,
615 entries=[], mergepoint=e.mergepoint,
615 entries=[], mergepoint=e.mergepoint,
616 branchpoints=e.branchpoints, commitid=e.commitid)
616 branchpoints=e.branchpoints, commitid=e.commitid)
617 changesets.append(c)
617 changesets.append(c)
618
618
619 files = set()
619 files = set()
620 if len(changesets) % 100 == 0:
620 if len(changesets) % 100 == 0:
621 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
621 t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
622 ui.status(stringutil.ellipsis(t, 80) + '\n')
622 ui.status(stringutil.ellipsis(t, 80) + '\n')
623
623
624 c.entries.append(e)
624 c.entries.append(e)
625 files.add(e.file)
625 files.add(e.file)
626 c.date = e.date # changeset date is date of latest commit in it
626 c.date = e.date # changeset date is date of latest commit in it
627
627
628 # Mark synthetic changesets
628 # Mark synthetic changesets
629
629
630 for c in changesets:
630 for c in changesets:
631 # Synthetic revisions always get their own changeset, because
631 # Synthetic revisions always get their own changeset, because
632 # the log message includes the filename. E.g. if you add file3
632 # the log message includes the filename. E.g. if you add file3
633 # and file4 on a branch, you get four log entries and three
633 # and file4 on a branch, you get four log entries and three
634 # changesets:
634 # changesets:
635 # "File file3 was added on branch ..." (synthetic, 1 entry)
635 # "File file3 was added on branch ..." (synthetic, 1 entry)
636 # "File file4 was added on branch ..." (synthetic, 1 entry)
636 # "File file4 was added on branch ..." (synthetic, 1 entry)
637 # "Add file3 and file4 to fix ..." (real, 2 entries)
637 # "Add file3 and file4 to fix ..." (real, 2 entries)
638 # Hence the check for 1 entry here.
638 # Hence the check for 1 entry here.
639 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
639 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
640
640
641 # Sort files in each changeset
641 # Sort files in each changeset
642
642
643 def entitycompare(l, r):
643 def entitycompare(l, r):
644 'Mimic cvsps sorting order'
644 'Mimic cvsps sorting order'
645 l = l.file.split('/')
645 l = l.file.split('/')
646 r = r.file.split('/')
646 r = r.file.split('/')
647 nl = len(l)
647 nl = len(l)
648 nr = len(r)
648 nr = len(r)
649 n = min(nl, nr)
649 n = min(nl, nr)
650 for i in range(n):
650 for i in range(n):
651 if i + 1 == nl and nl < nr:
651 if i + 1 == nl and nl < nr:
652 return -1
652 return -1
653 elif i + 1 == nr and nl > nr:
653 elif i + 1 == nr and nl > nr:
654 return +1
654 return +1
655 elif l[i] < r[i]:
655 elif l[i] < r[i]:
656 return -1
656 return -1
657 elif l[i] > r[i]:
657 elif l[i] > r[i]:
658 return +1
658 return +1
659 return 0
659 return 0
660
660
661 for c in changesets:
661 for c in changesets:
662 c.entries.sort(key=functools.cmp_to_key(entitycompare))
662 c.entries.sort(key=functools.cmp_to_key(entitycompare))
663
663
664 # Sort changesets by date
664 # Sort changesets by date
665
665
666 odd = set()
666 odd = set()
667 def cscmp(l, r):
667 def cscmp(l, r):
668 d = sum(l.date) - sum(r.date)
668 d = sum(l.date) - sum(r.date)
669 if d:
669 if d:
670 return d
670 return d
671
671
672 # detect vendor branches and initial commits on a branch
672 # detect vendor branches and initial commits on a branch
673 le = {}
673 le = {}
674 for e in l.entries:
674 for e in l.entries:
675 le[e.rcs] = e.revision
675 le[e.rcs] = e.revision
676 re = {}
676 re = {}
677 for e in r.entries:
677 for e in r.entries:
678 re[e.rcs] = e.revision
678 re[e.rcs] = e.revision
679
679
680 d = 0
680 d = 0
681 for e in l.entries:
681 for e in l.entries:
682 if re.get(e.rcs, None) == e.parent:
682 if re.get(e.rcs, None) == e.parent:
683 assert not d
683 assert not d
684 d = 1
684 d = 1
685 break
685 break
686
686
687 for e in r.entries:
687 for e in r.entries:
688 if le.get(e.rcs, None) == e.parent:
688 if le.get(e.rcs, None) == e.parent:
689 if d:
689 if d:
690 odd.add((l, r))
690 odd.add((l, r))
691 d = -1
691 d = -1
692 break
692 break
693 # By this point, the changesets are sufficiently compared that
693 # By this point, the changesets are sufficiently compared that
694 # we don't really care about ordering. However, this leaves
694 # we don't really care about ordering. However, this leaves
695 # some race conditions in the tests, so we compare on the
695 # some race conditions in the tests, so we compare on the
696 # number of files modified, the files contained in each
696 # number of files modified, the files contained in each
697 # changeset, and the branchpoints in the change to ensure test
697 # changeset, and the branchpoints in the change to ensure test
698 # output remains stable.
698 # output remains stable.
699
699
700 # recommended replacement for cmp from
700 # recommended replacement for cmp from
701 # https://docs.python.org/3.0/whatsnew/3.0.html
701 # https://docs.python.org/3.0/whatsnew/3.0.html
702 c = lambda x, y: (x > y) - (x < y)
702 c = lambda x, y: (x > y) - (x < y)
703 # Sort bigger changes first.
703 # Sort bigger changes first.
704 if not d:
704 if not d:
705 d = c(len(l.entries), len(r.entries))
705 d = c(len(l.entries), len(r.entries))
706 # Try sorting by filename in the change.
706 # Try sorting by filename in the change.
707 if not d:
707 if not d:
708 d = c([e.file for e in l.entries], [e.file for e in r.entries])
708 d = c([e.file for e in l.entries], [e.file for e in r.entries])
709 # Try and put changes without a branch point before ones with
709 # Try and put changes without a branch point before ones with
710 # a branch point.
710 # a branch point.
711 if not d:
711 if not d:
712 d = c(len(l.branchpoints), len(r.branchpoints))
712 d = c(len(l.branchpoints), len(r.branchpoints))
713 return d
713 return d
714
714
715 changesets.sort(key=functools.cmp_to_key(cscmp))
715 changesets.sort(key=functools.cmp_to_key(cscmp))
716
716
717 # Collect tags
717 # Collect tags
718
718
719 globaltags = {}
719 globaltags = {}
720 for c in changesets:
720 for c in changesets:
721 for e in c.entries:
721 for e in c.entries:
722 for tag in e.tags:
722 for tag in e.tags:
723 # remember which is the latest changeset to have this tag
723 # remember which is the latest changeset to have this tag
724 globaltags[tag] = c
724 globaltags[tag] = c
725
725
726 for c in changesets:
726 for c in changesets:
727 tags = set()
727 tags = set()
728 for e in c.entries:
728 for e in c.entries:
729 tags.update(e.tags)
729 tags.update(e.tags)
730 # remember tags only if this is the latest changeset to have it
730 # remember tags only if this is the latest changeset to have it
731 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
731 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
732
732
733 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
733 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
734 # by inserting dummy changesets with two parents, and handle
734 # by inserting dummy changesets with two parents, and handle
735 # {{mergefrombranch BRANCHNAME}} by setting two parents.
735 # {{mergefrombranch BRANCHNAME}} by setting two parents.
736
736
737 if mergeto is None:
737 if mergeto is None:
738 mergeto = br'{{mergetobranch ([-\w]+)}}'
738 mergeto = br'{{mergetobranch ([-\w]+)}}'
739 if mergeto:
739 if mergeto:
740 mergeto = re.compile(mergeto)
740 mergeto = re.compile(mergeto)
741
741
742 if mergefrom is None:
742 if mergefrom is None:
743 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
743 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
744 if mergefrom:
744 if mergefrom:
745 mergefrom = re.compile(mergefrom)
745 mergefrom = re.compile(mergefrom)
746
746
747 versions = {} # changeset index where we saw any particular file version
747 versions = {} # changeset index where we saw any particular file version
748 branches = {} # changeset index where we saw a branch
748 branches = {} # changeset index where we saw a branch
749 n = len(changesets)
749 n = len(changesets)
750 i = 0
750 i = 0
751 while i < n:
751 while i < n:
752 c = changesets[i]
752 c = changesets[i]
753
753
754 for f in c.entries:
754 for f in c.entries:
755 versions[(f.rcs, f.revision)] = i
755 versions[(f.rcs, f.revision)] = i
756
756
757 p = None
757 p = None
758 if c.branch in branches:
758 if c.branch in branches:
759 p = branches[c.branch]
759 p = branches[c.branch]
760 else:
760 else:
761 # first changeset on a new branch
761 # first changeset on a new branch
762 # the parent is a changeset with the branch in its
762 # the parent is a changeset with the branch in its
763 # branchpoints such that it is the latest possible
763 # branchpoints such that it is the latest possible
764 # commit without any intervening, unrelated commits.
764 # commit without any intervening, unrelated commits.
765
765
766 for candidate in pycompat.xrange(i):
766 for candidate in pycompat.xrange(i):
767 if c.branch not in changesets[candidate].branchpoints:
767 if c.branch not in changesets[candidate].branchpoints:
768 if p is not None:
768 if p is not None:
769 break
769 break
770 continue
770 continue
771 p = candidate
771 p = candidate
772
772
773 c.parents = []
773 c.parents = []
774 if p is not None:
774 if p is not None:
775 p = changesets[p]
775 p = changesets[p]
776
776
777 # Ensure no changeset has a synthetic changeset as a parent.
777 # Ensure no changeset has a synthetic changeset as a parent.
778 while p.synthetic:
778 while p.synthetic:
779 assert len(p.parents) <= 1, \
779 assert len(p.parents) <= 1, \
780 _('synthetic changeset cannot have multiple parents')
780 _('synthetic changeset cannot have multiple parents')
781 if p.parents:
781 if p.parents:
782 p = p.parents[0]
782 p = p.parents[0]
783 else:
783 else:
784 p = None
784 p = None
785 break
785 break
786
786
787 if p is not None:
787 if p is not None:
788 c.parents.append(p)
788 c.parents.append(p)
789
789
790 if c.mergepoint:
790 if c.mergepoint:
791 if c.mergepoint == 'HEAD':
791 if c.mergepoint == 'HEAD':
792 c.mergepoint = None
792 c.mergepoint = None
793 c.parents.append(changesets[branches[c.mergepoint]])
793 c.parents.append(changesets[branches[c.mergepoint]])
794
794
795 if mergefrom:
795 if mergefrom:
796 m = mergefrom.search(c.comment)
796 m = mergefrom.search(c.comment)
797 if m:
797 if m:
798 m = m.group(1)
798 m = m.group(1)
799 if m == 'HEAD':
799 if m == 'HEAD':
800 m = None
800 m = None
801 try:
801 try:
802 candidate = changesets[branches[m]]
802 candidate = changesets[branches[m]]
803 except KeyError:
803 except KeyError:
804 ui.warn(_("warning: CVS commit message references "
804 ui.warn(_("warning: CVS commit message references "
805 "non-existent branch %r:\n%s\n")
805 "non-existent branch %r:\n%s\n")
806 % (pycompat.bytestr(m), c.comment))
806 % (pycompat.bytestr(m), c.comment))
807 if m in branches and c.branch != m and not candidate.synthetic:
807 if m in branches and c.branch != m and not candidate.synthetic:
808 c.parents.append(candidate)
808 c.parents.append(candidate)
809
809
810 if mergeto:
810 if mergeto:
811 m = mergeto.search(c.comment)
811 m = mergeto.search(c.comment)
812 if m:
812 if m:
813 if m.groups():
813 if m.groups():
814 m = m.group(1)
814 m = m.group(1)
815 if m == 'HEAD':
815 if m == 'HEAD':
816 m = None
816 m = None
817 else:
817 else:
818 m = None # if no group found then merge to HEAD
818 m = None # if no group found then merge to HEAD
819 if m in branches and c.branch != m:
819 if m in branches and c.branch != m:
820 # insert empty changeset for merge
820 # insert empty changeset for merge
821 cc = changeset(
821 cc = changeset(
822 author=c.author, branch=m, date=c.date,
822 author=c.author, branch=m, date=c.date,
823 comment='convert-repo: CVS merge from branch %s'
823 comment='convert-repo: CVS merge from branch %s'
824 % c.branch,
824 % c.branch,
825 entries=[], tags=[],
825 entries=[], tags=[],
826 parents=[changesets[branches[m]], c])
826 parents=[changesets[branches[m]], c])
827 changesets.insert(i + 1, cc)
827 changesets.insert(i + 1, cc)
828 branches[m] = i + 1
828 branches[m] = i + 1
829
829
830 # adjust our loop counters now we have inserted a new entry
830 # adjust our loop counters now we have inserted a new entry
831 n += 1
831 n += 1
832 i += 2
832 i += 2
833 continue
833 continue
834
834
835 branches[c.branch] = i
835 branches[c.branch] = i
836 i += 1
836 i += 1
837
837
838 # Drop synthetic changesets (safe now that we have ensured no other
838 # Drop synthetic changesets (safe now that we have ensured no other
839 # changesets can have them as parents).
839 # changesets can have them as parents).
840 i = 0
840 i = 0
841 while i < len(changesets):
841 while i < len(changesets):
842 if changesets[i].synthetic:
842 if changesets[i].synthetic:
843 del changesets[i]
843 del changesets[i]
844 else:
844 else:
845 i += 1
845 i += 1
846
846
847 # Number changesets
847 # Number changesets
848
848
849 for i, c in enumerate(changesets):
849 for i, c in enumerate(changesets):
850 c.id = i + 1
850 c.id = i + 1
851
851
852 if odd:
852 if odd:
853 for l, r in odd:
853 for l, r in odd:
854 if l.id is not None and r.id is not None:
854 if l.id is not None and r.id is not None:
855 ui.warn(_('changeset %d is both before and after %d\n')
855 ui.warn(_('changeset %d is both before and after %d\n')
856 % (l.id, r.id))
856 % (l.id, r.id))
857
857
858 ui.status(_('%d changeset entries\n') % len(changesets))
858 ui.status(_('%d changeset entries\n') % len(changesets))
859
859
860 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
860 hook.hook(ui, None, "cvschangesets", True, changesets=changesets)
861
861
862 return changesets
862 return changesets
863
863
864
864
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.
    '''
    opts = pycompat.byteskwargs(opts)

    # Pick the cache mode from the mutually exclusive command flags.
    if opts["new_cache"]:
        cache = "write"
    elif opts["update_cache"]:
        cache = "update"
    else:
        cache = None

    revisions = opts["revisions"]

    try:
        if args:
            log = []
            for directory in args:
                log += createlog(ui, directory, root=opts["root"], cache=cache)
        else:
            log = createlog(ui, root=opts["root"], cache=cache)
    except logerror as e:
        ui.write("%r\n"%e)
        return

    changesets = createchangeset(ui, log, opts["fuzz"])
    del log

    # Print changesets (optionally filtered)

    off = len(revisions)
    branches = {}    # latest version number in each branch
    ancestors = {}   # parent branch
    for cset in changesets:

        if opts["ancestors"]:
            # Record where each branch forked off, keyed by branch name.
            if (cset.branch not in branches
                    and cset.parents and cset.parents[0].id):
                ancestors[cset.branch] = (
                    changesets[cset.parents[0].id - 1].branch,
                    cset.parents[0].id)
            branches[cset.branch] = cset.id

        # limit by branches
        if (opts["branches"]
                and (cset.branch or 'HEAD') not in opts["branches"]):
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            # bug-for-bug compatibility with cvsps.
            ui.write('---------------------\n')
            ui.write(('PatchSet %d \n' % cset.id))
            ui.write(('Date: %s\n' % dateutil.datestr(
                cset.date, '%Y/%m/%d %H:%M:%S %1%2')))
            ui.write(('Author: %s\n' % cset.author))
            ui.write(('Branch: %s\n' % (cset.branch or 'HEAD')))
            ui.write(('Tag%s: %s \n' % (['', 's'][len(cset.tags) > 1],
                                        ','.join(cset.tags) or '(none)')))
            if cset.branchpoints:
                ui.write(('Branchpoints: %s \n') %
                         ', '.join(sorted(cset.branchpoints)))
            if opts["parents"] and cset.parents:
                if len(cset.parents) > 1:
                    ui.write(('Parents: %s\n' %
                              (','.join([(b"%d" % p.id)
                                         for p in cset.parents]))))
                else:
                    ui.write(('Parent: %d\n' % cset.parents[0].id))

            if opts["ancestors"]:
                # Walk the fork chain back to HEAD and print it.
                br = cset.branch
                chain = []
                while br:
                    br, childid = ancestors[br]
                    chain.append('%s:%d:%d'
                                 % (br or "HEAD", childid, branches[br]))
                if chain:
                    ui.write(('Ancestors: %s\n' % (','.join(chain))))

            ui.write(('Log:\n'))
            ui.write('%s\n\n' % cset.comment)
            ui.write(('Members: \n'))
            for entry in cset.entries:
                path = entry.file
                if path.startswith(opts["prefix"]):
                    path = path[len(opts["prefix"]):]
                ui.write('\t%s:%s->%s%s \n' % (
                    path,
                    '.'.join([b"%d" % x for x in entry.parent]) or 'INITIAL',
                    '.'.join([(b"%d" % x) for x in entry.revision]),
                    ['', '(DEAD)'][entry.dead]))
            ui.write('\n')

        # have we seen the start tag?
        if revisions and off:
            if (revisions[0] == (b"%d" % cset.id)
                    or revisions[0] in cset.tags):
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if (revisions[1] == (b"%d" % cset.id)
                    or revisions[1] in cset.tags):
                break
@@ -1,3701 +1,3701
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help COMMAND` for more details)::
17 Common tasks (use :hg:`help COMMAND` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behavior can be configured with::
31 files creations or deletions. This behavior can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 It may be desirable for mq changesets to be kept in the secret phase (see
41 It may be desirable for mq changesets to be kept in the secret phase (see
42 :hg:`help phases`), which can be enabled with the following setting::
42 :hg:`help phases`), which can be enabled with the following setting::
43
43
44 [mq]
44 [mq]
45 secret = True
45 secret = True
46
46
47 You will by default be managing a patch queue named "patches". You can
47 You will by default be managing a patch queue named "patches". You can
48 create other, independent patch queues with the :hg:`qqueue` command.
48 create other, independent patch queues with the :hg:`qqueue` command.
49
49
50 If the working directory contains uncommitted files, qpush, qpop and
50 If the working directory contains uncommitted files, qpush, qpop and
51 qgoto abort immediately. If -f/--force is used, the changes are
51 qgoto abort immediately. If -f/--force is used, the changes are
52 discarded. Setting::
52 discarded. Setting::
53
53
54 [mq]
54 [mq]
55 keepchanges = True
55 keepchanges = True
56
56
57 make them behave as if --keep-changes were passed, and non-conflicting
57 make them behave as if --keep-changes were passed, and non-conflicting
58 local changes will be tolerated and preserved. If incompatible options
58 local changes will be tolerated and preserved. If incompatible options
59 such as -f/--force or --exact are passed, this setting is ignored.
59 such as -f/--force or --exact are passed, this setting is ignored.
60
60
61 This extension used to provide a strip command. This command now lives
61 This extension used to provide a strip command. This command now lives
62 in the strip extension.
62 in the strip extension.
63 '''
63 '''
64
64
65 from __future__ import absolute_import, print_function
65 from __future__ import absolute_import, print_function
66
66
67 import errno
67 import errno
68 import os
68 import os
69 import re
69 import re
70 import shutil
70 import shutil
71 from mercurial.i18n import _
71 from mercurial.i18n import _
72 from mercurial.node import (
72 from mercurial.node import (
73 bin,
73 bin,
74 hex,
74 hex,
75 nullid,
75 nullid,
76 nullrev,
76 nullrev,
77 short,
77 short,
78 )
78 )
79 from mercurial import (
79 from mercurial import (
80 cmdutil,
80 cmdutil,
81 commands,
81 commands,
82 dirstateguard,
82 dirstateguard,
83 encoding,
83 encoding,
84 error,
84 error,
85 extensions,
85 extensions,
86 hg,
86 hg,
87 localrepo,
87 localrepo,
88 lock as lockmod,
88 lock as lockmod,
89 logcmdutil,
89 logcmdutil,
90 patch as patchmod,
90 patch as patchmod,
91 phases,
91 phases,
92 pycompat,
92 pycompat,
93 registrar,
93 registrar,
94 revsetlang,
94 revsetlang,
95 scmutil,
95 scmutil,
96 smartset,
96 smartset,
97 subrepoutil,
97 subrepoutil,
98 util,
98 util,
99 vfs as vfsmod,
99 vfs as vfsmod,
100 )
100 )
101 from mercurial.utils import (
101 from mercurial.utils import (
102 dateutil,
102 dateutil,
103 stringutil,
103 stringutil,
104 )
104 )
105
105
release = lockmod.release
seriesopts = [('s', 'summary', None, _('print first line of patch header'))]

cmdtable = {}
command = registrar.command(cmdtable)
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

configtable = {}
configitem = registrar.configitem(configtable)

# Declare the [mq] configuration knobs and their defaults.
configitem('mq', 'git', default='auto')
configitem('mq', 'keepchanges', default=False)
configitem('mq', 'plain', default=False)
configitem('mq', 'secret', default=False)

# Force-load the strip extension (formerly included in mq) and borrow
# some of its utilities.
try:
    stripext = extensions.find('strip')
except KeyError:
    # note: load is lazy so we could avoid the try-except,
    # but I (marmoute) prefer this explicit code.
    class dummyui(object):
        def debug(self, msg):
            pass
        def log(self, event, msgfmt, *msgargs, **opts):
            pass
    stripext = extensions.load(dummyui(), 'strip', '')

strip = stripext.strip
checksubstate = stripext.checksubstate
checklocalchanges = stripext.checklocalchanges


# Patch names look like unix file names.
# They must be joinable with the queue directory and result in the patch path.
normname = util.normpath
154
class statusentry(object):
    """One applied-patch record: a changeset node paired with a patch name."""

    def __init__(self, node, name):
        self.node = node
        self.name = name

    def __bytes__(self):
        # serialized form used in the mq status file: "<hex node>:<name>"
        return ':'.join([hex(self.node), self.name])

    __str__ = encoding.strmethod(__bytes__)
    __repr__ = encoding.strmethod(__bytes__)
164
164
165 # The order of the headers in 'hg export' HG patches:
165 # The order of the headers in 'hg export' HG patches:
166 HGHEADERS = [
166 HGHEADERS = [
167 # '# HG changeset patch',
167 # '# HG changeset patch',
168 '# User ',
168 '# User ',
169 '# Date ',
169 '# Date ',
170 '# ',
170 '# ',
171 '# Branch ',
171 '# Branch ',
172 '# Node ID ',
172 '# Node ID ',
173 '# Parent ', # can occur twice for merges - but that is not relevant for mq
173 '# Parent ', # can occur twice for merges - but that is not relevant for mq
174 ]
174 ]
175 # The order of headers in plain 'mail style' patches:
175 # The order of headers in plain 'mail style' patches:
176 PLAINHEADERS = {
176 PLAINHEADERS = {
177 'from': 0,
177 'from': 0,
178 'date': 1,
178 'date': 1,
179 'subject': 2,
179 'subject': 2,
180 }
180 }
181
181
182 def inserthgheader(lines, header, value):
182 def inserthgheader(lines, header, value):
183 """Assuming lines contains a HG patch header, add a header line with value.
183 """Assuming lines contains a HG patch header, add a header line with value.
184 >>> try: inserthgheader([], b'# Date ', b'z')
184 >>> try: inserthgheader([], b'# Date ', b'z')
185 ... except ValueError as inst: print("oops")
185 ... except ValueError as inst: print("oops")
186 oops
186 oops
187 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
187 >>> inserthgheader([b'# HG changeset patch'], b'# Date ', b'z')
188 ['# HG changeset patch', '# Date z']
188 ['# HG changeset patch', '# Date z']
189 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
189 >>> inserthgheader([b'# HG changeset patch', b''], b'# Date ', b'z')
190 ['# HG changeset patch', '# Date z', '']
190 ['# HG changeset patch', '# Date z', '']
191 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
191 >>> inserthgheader([b'# HG changeset patch', b'# User y'], b'# Date ', b'z')
192 ['# HG changeset patch', '# User y', '# Date z']
192 ['# HG changeset patch', '# User y', '# Date z']
193 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
193 >>> inserthgheader([b'# HG changeset patch', b'# Date x', b'# User y'],
194 ... b'# User ', b'z')
194 ... b'# User ', b'z')
195 ['# HG changeset patch', '# Date x', '# User z']
195 ['# HG changeset patch', '# Date x', '# User z']
196 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
196 >>> inserthgheader([b'# HG changeset patch', b'# Date y'], b'# Date ', b'z')
197 ['# HG changeset patch', '# Date z']
197 ['# HG changeset patch', '# Date z']
198 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
198 >>> inserthgheader([b'# HG changeset patch', b'', b'# Date y'],
199 ... b'# Date ', b'z')
199 ... b'# Date ', b'z')
200 ['# HG changeset patch', '# Date z', '', '# Date y']
200 ['# HG changeset patch', '# Date z', '', '# Date y']
201 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
201 >>> inserthgheader([b'# HG changeset patch', b'# Parent y'],
202 ... b'# Date ', b'z')
202 ... b'# Date ', b'z')
203 ['# HG changeset patch', '# Date z', '# Parent y']
203 ['# HG changeset patch', '# Date z', '# Parent y']
204 """
204 """
205 start = lines.index('# HG changeset patch') + 1
205 start = lines.index('# HG changeset patch') + 1
206 newindex = HGHEADERS.index(header)
206 newindex = HGHEADERS.index(header)
207 bestpos = len(lines)
207 bestpos = len(lines)
208 for i in range(start, len(lines)):
208 for i in range(start, len(lines)):
209 line = lines[i]
209 line = lines[i]
210 if not line.startswith('# '):
210 if not line.startswith('# '):
211 bestpos = min(bestpos, i)
211 bestpos = min(bestpos, i)
212 break
212 break
213 for lineindex, h in enumerate(HGHEADERS):
213 for lineindex, h in enumerate(HGHEADERS):
214 if line.startswith(h):
214 if line.startswith(h):
215 if lineindex == newindex:
215 if lineindex == newindex:
216 lines[i] = header + value
216 lines[i] = header + value
217 return lines
217 return lines
218 if lineindex > newindex:
218 if lineindex > newindex:
219 bestpos = min(bestpos, i)
219 bestpos = min(bestpos, i)
220 break # next line
220 break # next line
221 lines.insert(bestpos, header + value)
221 lines.insert(bestpos, header + value)
222 return lines
222 return lines
223
223
224 def insertplainheader(lines, header, value):
224 def insertplainheader(lines, header, value):
225 """For lines containing a plain patch header, add a header line with value.
225 """For lines containing a plain patch header, add a header line with value.
226 >>> insertplainheader([], b'Date', b'z')
226 >>> insertplainheader([], b'Date', b'z')
227 ['Date: z']
227 ['Date: z']
228 >>> insertplainheader([b''], b'Date', b'z')
228 >>> insertplainheader([b''], b'Date', b'z')
229 ['Date: z', '']
229 ['Date: z', '']
230 >>> insertplainheader([b'x'], b'Date', b'z')
230 >>> insertplainheader([b'x'], b'Date', b'z')
231 ['Date: z', '', 'x']
231 ['Date: z', '', 'x']
232 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
232 >>> insertplainheader([b'From: y', b'x'], b'Date', b'z')
233 ['From: y', 'Date: z', '', 'x']
233 ['From: y', 'Date: z', '', 'x']
234 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
234 >>> insertplainheader([b' date : x', b' from : y', b''], b'From', b'z')
235 [' date : x', 'From: z', '']
235 [' date : x', 'From: z', '']
236 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
236 >>> insertplainheader([b'', b'Date: y'], b'Date', b'z')
237 ['Date: z', '', 'Date: y']
237 ['Date: z', '', 'Date: y']
238 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
238 >>> insertplainheader([b'foo: bar', b'DATE: z', b'x'], b'From', b'y')
239 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
239 ['From: y', 'foo: bar', 'DATE: z', '', 'x']
240 """
240 """
241 newprio = PLAINHEADERS[header.lower()]
241 newprio = PLAINHEADERS[header.lower()]
242 bestpos = len(lines)
242 bestpos = len(lines)
243 for i, line in enumerate(lines):
243 for i, line in enumerate(lines):
244 if ':' in line:
244 if ':' in line:
245 lheader = line.split(':', 1)[0].strip().lower()
245 lheader = line.split(':', 1)[0].strip().lower()
246 lprio = PLAINHEADERS.get(lheader, newprio + 1)
246 lprio = PLAINHEADERS.get(lheader, newprio + 1)
247 if lprio == newprio:
247 if lprio == newprio:
248 lines[i] = '%s: %s' % (header, value)
248 lines[i] = '%s: %s' % (header, value)
249 return lines
249 return lines
250 if lprio > newprio and i < bestpos:
250 if lprio > newprio and i < bestpos:
251 bestpos = i
251 bestpos = i
252 else:
252 else:
253 if line:
253 if line:
254 lines.insert(i, '')
254 lines.insert(i, '')
255 if i < bestpos:
255 if i < bestpos:
256 bestpos = i
256 bestpos = i
257 break
257 break
258 lines.insert(bestpos, '%s: %s' % (header, value))
258 lines.insert(bestpos, '%s: %s' % (header, value))
259 return lines
259 return lines
260
260
261 class patchheader(object):
261 class patchheader(object):
262 def __init__(self, pf, plainmode=False):
262 def __init__(self, pf, plainmode=False):
263 def eatdiff(lines):
263 def eatdiff(lines):
264 while lines:
264 while lines:
265 l = lines[-1]
265 l = lines[-1]
266 if (l.startswith("diff -") or
266 if (l.startswith("diff -") or
267 l.startswith("Index:") or
267 l.startswith("Index:") or
268 l.startswith("===========")):
268 l.startswith("===========")):
269 del lines[-1]
269 del lines[-1]
270 else:
270 else:
271 break
271 break
272 def eatempty(lines):
272 def eatempty(lines):
273 while lines:
273 while lines:
274 if not lines[-1].strip():
274 if not lines[-1].strip():
275 del lines[-1]
275 del lines[-1]
276 else:
276 else:
277 break
277 break
278
278
279 message = []
279 message = []
280 comments = []
280 comments = []
281 user = None
281 user = None
282 date = None
282 date = None
283 parent = None
283 parent = None
284 format = None
284 format = None
285 subject = None
285 subject = None
286 branch = None
286 branch = None
287 nodeid = None
287 nodeid = None
288 diffstart = 0
288 diffstart = 0
289
289
290 for line in open(pf, 'rb'):
290 for line in open(pf, 'rb'):
291 line = line.rstrip()
291 line = line.rstrip()
292 if (line.startswith('diff --git')
292 if (line.startswith('diff --git')
293 or (diffstart and line.startswith('+++ '))):
293 or (diffstart and line.startswith('+++ '))):
294 diffstart = 2
294 diffstart = 2
295 break
295 break
296 diffstart = 0 # reset
296 diffstart = 0 # reset
297 if line.startswith("--- "):
297 if line.startswith("--- "):
298 diffstart = 1
298 diffstart = 1
299 continue
299 continue
300 elif format == "hgpatch":
300 elif format == "hgpatch":
301 # parse values when importing the result of an hg export
301 # parse values when importing the result of an hg export
302 if line.startswith("# User "):
302 if line.startswith("# User "):
303 user = line[7:]
303 user = line[7:]
304 elif line.startswith("# Date "):
304 elif line.startswith("# Date "):
305 date = line[7:]
305 date = line[7:]
306 elif line.startswith("# Parent "):
306 elif line.startswith("# Parent "):
307 parent = line[9:].lstrip() # handle double trailing space
307 parent = line[9:].lstrip() # handle double trailing space
308 elif line.startswith("# Branch "):
308 elif line.startswith("# Branch "):
309 branch = line[9:]
309 branch = line[9:]
310 elif line.startswith("# Node ID "):
310 elif line.startswith("# Node ID "):
311 nodeid = line[10:]
311 nodeid = line[10:]
312 elif not line.startswith("# ") and line:
312 elif not line.startswith("# ") and line:
313 message.append(line)
313 message.append(line)
314 format = None
314 format = None
315 elif line == '# HG changeset patch':
315 elif line == '# HG changeset patch':
316 message = []
316 message = []
317 format = "hgpatch"
317 format = "hgpatch"
318 elif (format != "tagdone" and (line.startswith("Subject: ") or
318 elif (format != "tagdone" and (line.startswith("Subject: ") or
319 line.startswith("subject: "))):
319 line.startswith("subject: "))):
320 subject = line[9:]
320 subject = line[9:]
321 format = "tag"
321 format = "tag"
322 elif (format != "tagdone" and (line.startswith("From: ") or
322 elif (format != "tagdone" and (line.startswith("From: ") or
323 line.startswith("from: "))):
323 line.startswith("from: "))):
324 user = line[6:]
324 user = line[6:]
325 format = "tag"
325 format = "tag"
326 elif (format != "tagdone" and (line.startswith("Date: ") or
326 elif (format != "tagdone" and (line.startswith("Date: ") or
327 line.startswith("date: "))):
327 line.startswith("date: "))):
328 date = line[6:]
328 date = line[6:]
329 format = "tag"
329 format = "tag"
330 elif format == "tag" and line == "":
330 elif format == "tag" and line == "":
331 # when looking for tags (subject: from: etc) they
331 # when looking for tags (subject: from: etc) they
332 # end once you find a blank line in the source
332 # end once you find a blank line in the source
333 format = "tagdone"
333 format = "tagdone"
334 elif message or line:
334 elif message or line:
335 message.append(line)
335 message.append(line)
336 comments.append(line)
336 comments.append(line)
337
337
338 eatdiff(message)
338 eatdiff(message)
339 eatdiff(comments)
339 eatdiff(comments)
340 # Remember the exact starting line of the patch diffs before consuming
340 # Remember the exact starting line of the patch diffs before consuming
341 # empty lines, for external use by TortoiseHg and others
341 # empty lines, for external use by TortoiseHg and others
342 self.diffstartline = len(comments)
342 self.diffstartline = len(comments)
343 eatempty(message)
343 eatempty(message)
344 eatempty(comments)
344 eatempty(comments)
345
345
346 # make sure message isn't empty
346 # make sure message isn't empty
347 if format and format.startswith("tag") and subject:
347 if format and format.startswith("tag") and subject:
348 message.insert(0, subject)
348 message.insert(0, subject)
349
349
350 self.message = message
350 self.message = message
351 self.comments = comments
351 self.comments = comments
352 self.user = user
352 self.user = user
353 self.date = date
353 self.date = date
354 self.parent = parent
354 self.parent = parent
355 # nodeid and branch are for external use by TortoiseHg and others
355 # nodeid and branch are for external use by TortoiseHg and others
356 self.nodeid = nodeid
356 self.nodeid = nodeid
357 self.branch = branch
357 self.branch = branch
358 self.haspatch = diffstart > 1
358 self.haspatch = diffstart > 1
359 self.plainmode = (plainmode or
359 self.plainmode = (plainmode or
360 '# HG changeset patch' not in self.comments and
360 '# HG changeset patch' not in self.comments and
361 any(c.startswith('Date: ') or
361 any(c.startswith('Date: ') or
362 c.startswith('From: ')
362 c.startswith('From: ')
363 for c in self.comments))
363 for c in self.comments))
364
364
365 def setuser(self, user):
365 def setuser(self, user):
366 try:
366 try:
367 inserthgheader(self.comments, '# User ', user)
367 inserthgheader(self.comments, '# User ', user)
368 except ValueError:
368 except ValueError:
369 if self.plainmode:
369 if self.plainmode:
370 insertplainheader(self.comments, 'From', user)
370 insertplainheader(self.comments, 'From', user)
371 else:
371 else:
372 tmp = ['# HG changeset patch', '# User ' + user]
372 tmp = ['# HG changeset patch', '# User ' + user]
373 self.comments = tmp + self.comments
373 self.comments = tmp + self.comments
374 self.user = user
374 self.user = user
375
375
376 def setdate(self, date):
376 def setdate(self, date):
377 try:
377 try:
378 inserthgheader(self.comments, '# Date ', date)
378 inserthgheader(self.comments, '# Date ', date)
379 except ValueError:
379 except ValueError:
380 if self.plainmode:
380 if self.plainmode:
381 insertplainheader(self.comments, 'Date', date)
381 insertplainheader(self.comments, 'Date', date)
382 else:
382 else:
383 tmp = ['# HG changeset patch', '# Date ' + date]
383 tmp = ['# HG changeset patch', '# Date ' + date]
384 self.comments = tmp + self.comments
384 self.comments = tmp + self.comments
385 self.date = date
385 self.date = date
386
386
387 def setparent(self, parent):
387 def setparent(self, parent):
388 try:
388 try:
389 inserthgheader(self.comments, '# Parent ', parent)
389 inserthgheader(self.comments, '# Parent ', parent)
390 except ValueError:
390 except ValueError:
391 if not self.plainmode:
391 if not self.plainmode:
392 tmp = ['# HG changeset patch', '# Parent ' + parent]
392 tmp = ['# HG changeset patch', '# Parent ' + parent]
393 self.comments = tmp + self.comments
393 self.comments = tmp + self.comments
394 self.parent = parent
394 self.parent = parent
395
395
396 def setmessage(self, message):
396 def setmessage(self, message):
397 if self.comments:
397 if self.comments:
398 self._delmsg()
398 self._delmsg()
399 self.message = [message]
399 self.message = [message]
400 if message:
400 if message:
401 if self.plainmode and self.comments and self.comments[-1]:
401 if self.plainmode and self.comments and self.comments[-1]:
402 self.comments.append('')
402 self.comments.append('')
403 self.comments.append(message)
403 self.comments.append(message)
404
404
405 def __bytes__(self):
405 def __bytes__(self):
406 s = '\n'.join(self.comments).rstrip()
406 s = '\n'.join(self.comments).rstrip()
407 if not s:
407 if not s:
408 return ''
408 return ''
409 return s + '\n\n'
409 return s + '\n\n'
410
410
    # str() rendering reuses the bytes form (py2/py3 compatibility shim).
    __str__ = encoding.strmethod(__bytes__)
412
412
    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            # A 'subject: ' line duplicates the first message line; if we
            # find it (case-insensitively), drop it plus the two leading
            # message lines it accounts for (subject + blank separator).
            subj = 'subject: ' + self.message[0].lower()
            for i in pycompat.xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    self.message = self.message[2:]
                    break
        # Remove the remaining message lines from comments, scanning
        # forward from the last deletion point (order is preserved, so
        # each message line is found at or after the previous one).
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]
429
429
def newcommit(repo, phase, *args, **kwargs):
    """helper dedicated to ensure a commit respect mq.secret setting

    It should be used instead of repo.commit inside the mq source for operation
    creating new changeset.
    """
    repo = repo.unfiltered()
    # With no explicit phase, honor mq.secret by committing secret.
    if phase is None and repo.ui.configbool('mq', 'secret'):
        phase = phases.secret
    overrides = {('ui', 'allowemptycommit'): True}
    if phase is not None:
        overrides[('phases', 'new-commit')] = phase
    with repo.ui.configoverride(overrides, 'mq'):
        # NOTE: redundant with the override above; preserved as-is.
        repo.ui.setconfig('ui', 'allowemptycommit', True)
        return repo.commit(*args, **kwargs)
446
446
class AbortNoCleanup(error.Abort):
    # Abort variant whose handlers skip the usual abort-time cleanup: see
    # the `except AbortNoCleanup` branch in queue.apply, which closes the
    # transaction and saves dirty state instead of rolling it back.
    pass
449
449
450 class queue(object):
450 class queue(object):
    def __init__(self, ui, baseui, path, patchdir=None):
        # path: the .hg directory location holding the patch queues
        self.basepath = path
        try:
            # 'patches.queue' names the active queue; an empty or missing
            # file means the default 'patches' directory is in use.
            with open(os.path.join(path, 'patches.queue'), r'rb') as fh:
                cur = fh.read().rstrip()

            if not cur:
                curpath = os.path.join(path, 'patches')
            else:
                curpath = os.path.join(path, 'patches-' + cur)
        except IOError:
            curpath = os.path.join(path, 'patches')
        # An explicit patchdir overrides the queue selection above.
        self.path = patchdir or curpath
        self.opener = vfsmod.vfs(self.path)
        self.ui = ui
        self.baseui = baseui
        # Dirty flags track which control files need rewriting on savedirty().
        self.applieddirty = False
        self.seriesdirty = False
        self.added = []
        # Control-file names, relative to self.path.
        self.seriespath = "series"
        self.statuspath = "status"
        self.guardspath = "guards"
        self.activeguards = None
        self.guardsdirty = False
        # Handle mq.git as a bool with extended values
        # (true/false parse to 'yes'/'no'; 'auto'/'keep' pass through).
        gitmode = ui.config('mq', 'git').lower()
        boolmode = stringutil.parsebool(gitmode)
        if boolmode is not None:
            if boolmode:
                gitmode = 'yes'
            else:
                gitmode = 'no'
        self.gitmode = gitmode
        # deprecated config: mq.plain
        self.plainmode = ui.configbool('mq', 'plain')
        self.checkapplied = True
487
487
488 @util.propertycache
488 @util.propertycache
489 def applied(self):
489 def applied(self):
490 def parselines(lines):
490 def parselines(lines):
491 for l in lines:
491 for l in lines:
492 entry = l.split(':', 1)
492 entry = l.split(':', 1)
493 if len(entry) > 1:
493 if len(entry) > 1:
494 n, name = entry
494 n, name = entry
495 yield statusentry(bin(n), name)
495 yield statusentry(bin(n), name)
496 elif l.strip():
496 elif l.strip():
497 self.ui.warn(_('malformated mq status line: %s\n') %
497 self.ui.warn(_('malformated mq status line: %s\n') %
498 stringutil.pprint(entry))
498 stringutil.pprint(entry))
499 # else we ignore empty lines
499 # else we ignore empty lines
500 try:
500 try:
501 lines = self.opener.read(self.statuspath).splitlines()
501 lines = self.opener.read(self.statuspath).splitlines()
502 return list(parselines(lines))
502 return list(parselines(lines))
503 except IOError as e:
503 except IOError as e:
504 if e.errno == errno.ENOENT:
504 if e.errno == errno.ENOENT:
505 return []
505 return []
506 raise
506 raise
507
507
508 @util.propertycache
508 @util.propertycache
509 def fullseries(self):
509 def fullseries(self):
510 try:
510 try:
511 return self.opener.read(self.seriespath).splitlines()
511 return self.opener.read(self.seriespath).splitlines()
512 except IOError as e:
512 except IOError as e:
513 if e.errno == errno.ENOENT:
513 if e.errno == errno.ENOENT:
514 return []
514 return []
515 raise
515 raise
516
516
    @util.propertycache
    def series(self):
        # parseseries() assigns self.series directly, shadowing this
        # propertycache, so the recursive-looking return below reads the
        # freshly stored list, not this property.
        self.parseseries()
        return self.series
521
521
    @util.propertycache
    def seriesguards(self):
        # As with `series`, parseseries() assigns self.seriesguards,
        # shadowing this propertycache before the return executes.
        self.parseseries()
        return self.seriesguards
526
526
527 def invalidate(self):
527 def invalidate(self):
528 for a in 'applied fullseries series seriesguards'.split():
528 for a in 'applied fullseries series seriesguards'.split():
529 if a in self.__dict__:
529 if a in self.__dict__:
530 delattr(self, a)
530 delattr(self, a)
531 self.applieddirty = False
531 self.applieddirty = False
532 self.seriesdirty = False
532 self.seriesdirty = False
533 self.guardsdirty = False
533 self.guardsdirty = False
534 self.activeguards = None
534 self.activeguards = None
535
535
536 def diffopts(self, opts=None, patchfn=None, plain=False):
536 def diffopts(self, opts=None, patchfn=None, plain=False):
537 """Return diff options tweaked for this mq use, possibly upgrading to
537 """Return diff options tweaked for this mq use, possibly upgrading to
538 git format, and possibly plain and without lossy options."""
538 git format, and possibly plain and without lossy options."""
539 diffopts = patchmod.difffeatureopts(self.ui, opts,
539 diffopts = patchmod.difffeatureopts(self.ui, opts,
540 git=True, whitespace=not plain, formatchanging=not plain)
540 git=True, whitespace=not plain, formatchanging=not plain)
541 if self.gitmode == 'auto':
541 if self.gitmode == 'auto':
542 diffopts.upgrade = True
542 diffopts.upgrade = True
543 elif self.gitmode == 'keep':
543 elif self.gitmode == 'keep':
544 pass
544 pass
545 elif self.gitmode in ('yes', 'no'):
545 elif self.gitmode in ('yes', 'no'):
546 diffopts.git = self.gitmode == 'yes'
546 diffopts.git = self.gitmode == 'yes'
547 else:
547 else:
548 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
548 raise error.Abort(_('mq.git option can be auto/keep/yes/no'
549 ' got %s') % self.gitmode)
549 ' got %s') % self.gitmode)
550 if patchfn:
550 if patchfn:
551 diffopts = self.patchopts(diffopts, patchfn)
551 diffopts = self.patchopts(diffopts, patchfn)
552 return diffopts
552 return diffopts
553
553
554 def patchopts(self, diffopts, *patches):
554 def patchopts(self, diffopts, *patches):
555 """Return a copy of input diff options with git set to true if
555 """Return a copy of input diff options with git set to true if
556 referenced patch is a git patch and should be preserved as such.
556 referenced patch is a git patch and should be preserved as such.
557 """
557 """
558 diffopts = diffopts.copy()
558 diffopts = diffopts.copy()
559 if not diffopts.git and self.gitmode == 'keep':
559 if not diffopts.git and self.gitmode == 'keep':
560 for patchfn in patches:
560 for patchfn in patches:
561 patchf = self.opener(patchfn, 'r')
561 patchf = self.opener(patchfn, 'r')
562 # if the patch was a git patch, refresh it as a git patch
562 # if the patch was a git patch, refresh it as a git patch
563 diffopts.git = any(line.startswith('diff --git')
563 diffopts.git = any(line.startswith('diff --git')
564 for line in patchf)
564 for line in patchf)
565 patchf.close()
565 patchf.close()
566 return diffopts
566 return diffopts
567
567
    def join(self, *p):
        # Join path components onto the queue's patch directory.
        return os.path.join(self.path, *p)
570
570
571 def findseries(self, patch):
571 def findseries(self, patch):
572 def matchpatch(l):
572 def matchpatch(l):
573 l = l.split('#', 1)[0]
573 l = l.split('#', 1)[0]
574 return l.strip() == patch
574 return l.strip() == patch
575 for index, l in enumerate(self.fullseries):
575 for index, l in enumerate(self.fullseries):
576 if matchpatch(l):
576 if matchpatch(l):
577 return index
577 return index
578 return None
578 return None
579
579
    # Matches guard annotations in a series-line comment: an optional single
    # whitespace, '#', then '+' or '-' followed by the guard name (which may
    # not start with another '+'/'-' and may not contain '#' or whitespace).
    # Group 1 captures the sign plus the name, e.g. '+foo' or '-bar'.
    guard_re = re.compile(br'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
581
581
    def parseseries(self):
        """Parse fullseries into self.series (patch names) and
        self.seriesguards (per-patch guard lists).

        Note: assigning self.series / self.seriesguards here shadows the
        propertycache attributes of the same names.
        """
        self.series = []
        self.seriesguards = []
        for l in self.fullseries:
            h = l.find('#')
            if h == -1:
                # No comment: the whole line is the patch name.
                patch = l
                comment = ''
            elif h == 0:
                # Whole-line comment: skip.
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise error.Abort(_('%s appears more than once in %s') %
                                      (patch, self.join(self.seriespath)))
                self.series.append(patch)
                # Guards are extracted from the comment tail only.
                self.seriesguards.append(self.guard_re.findall(comment))
602
602
603 def checkguard(self, guard):
603 def checkguard(self, guard):
604 if not guard:
604 if not guard:
605 return _('guard cannot be an empty string')
605 return _('guard cannot be an empty string')
606 bad_chars = '# \t\r\n\f'
606 bad_chars = '# \t\r\n\f'
607 first = guard[0]
607 first = guard[0]
608 if first in '-+':
608 if first in '-+':
609 return (_('guard %r starts with invalid character: %r') %
609 return (_('guard %r starts with invalid character: %r') %
610 (guard, first))
610 (guard, first))
611 for c in bad_chars:
611 for c in bad_chars:
612 if c in guard:
612 if c in guard:
613 return _('invalid character in guard %r: %r') % (guard, c)
613 return _('invalid character in guard %r: %r') % (guard, c)
614
614
615 def setactive(self, guards):
615 def setactive(self, guards):
616 for guard in guards:
616 for guard in guards:
617 bad = self.checkguard(guard)
617 bad = self.checkguard(guard)
618 if bad:
618 if bad:
619 raise error.Abort(bad)
619 raise error.Abort(bad)
620 guards = sorted(set(guards))
620 guards = sorted(set(guards))
621 self.ui.debug('active guards: %s\n' % ' '.join(guards))
621 self.ui.debug('active guards: %s\n' % ' '.join(guards))
622 self.activeguards = guards
622 self.activeguards = guards
623 self.guardsdirty = True
623 self.guardsdirty = True
624
624
625 def active(self):
625 def active(self):
626 if self.activeguards is None:
626 if self.activeguards is None:
627 self.activeguards = []
627 self.activeguards = []
628 try:
628 try:
629 guards = self.opener.read(self.guardspath).split()
629 guards = self.opener.read(self.guardspath).split()
630 except IOError as err:
630 except IOError as err:
631 if err.errno != errno.ENOENT:
631 if err.errno != errno.ENOENT:
632 raise
632 raise
633 guards = []
633 guards = []
634 for i, guard in enumerate(guards):
634 for i, guard in enumerate(guards):
635 bad = self.checkguard(guard)
635 bad = self.checkguard(guard)
636 if bad:
636 if bad:
637 self.ui.warn('%s:%d: %s\n' %
637 self.ui.warn('%s:%d: %s\n' %
638 (self.join(self.guardspath), i + 1, bad))
638 (self.join(self.guardspath), i + 1, bad))
639 else:
639 else:
640 self.activeguards.append(guard)
640 self.activeguards.append(guard)
641 return self.activeguards
641 return self.activeguards
642
642
643 def setguards(self, idx, guards):
643 def setguards(self, idx, guards):
644 for g in guards:
644 for g in guards:
645 if len(g) < 2:
645 if len(g) < 2:
646 raise error.Abort(_('guard %r too short') % g)
646 raise error.Abort(_('guard %r too short') % g)
647 if g[0] not in '-+':
647 if g[0] not in '-+':
648 raise error.Abort(_('guard %r starts with invalid char') % g)
648 raise error.Abort(_('guard %r starts with invalid char') % g)
649 bad = self.checkguard(g[1:])
649 bad = self.checkguard(g[1:])
650 if bad:
650 if bad:
651 raise error.Abort(bad)
651 raise error.Abort(bad)
652 drop = self.guard_re.sub('', self.fullseries[idx])
652 drop = self.guard_re.sub('', self.fullseries[idx])
653 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
653 self.fullseries[idx] = drop + ''.join([' #' + g for g in guards])
654 self.parseseries()
654 self.parseseries()
655 self.seriesdirty = True
655 self.seriesdirty = True
656
656
657 def pushable(self, idx):
657 def pushable(self, idx):
658 if isinstance(idx, bytes):
658 if isinstance(idx, bytes):
659 idx = self.series.index(idx)
659 idx = self.series.index(idx)
660 patchguards = self.seriesguards[idx]
660 patchguards = self.seriesguards[idx]
661 if not patchguards:
661 if not patchguards:
662 return True, None
662 return True, None
663 guards = self.active()
663 guards = self.active()
664 exactneg = [g for g in patchguards
664 exactneg = [g for g in patchguards
665 if g.startswith('-') and g[1:] in guards]
665 if g.startswith('-') and g[1:] in guards]
666 if exactneg:
666 if exactneg:
667 return False, stringutil.pprint(exactneg[0])
667 return False, stringutil.pprint(exactneg[0])
668 pos = [g for g in patchguards if g.startswith('+')]
668 pos = [g for g in patchguards if g.startswith('+')]
669 exactpos = [g for g in pos if g[1:] in guards]
669 exactpos = [g for g in pos if g[1:] in guards]
670 if pos:
670 if pos:
671 if exactpos:
671 if exactpos:
672 return True, stringutil.pprint(exactpos[0])
672 return True, stringutil.pprint(exactpos[0])
673 return False, ' '.join([stringutil.pprint(p) for p in pos])
673 return False, ' '.join([stringutil.pprint(p) for p in pos])
674 return True, ''
674 return True, ''
675
675
    def explainpushable(self, idx, all_patches=False):
        """Print why the patch at *idx* is pushable or skipped.

        With all_patches, messages (including 'allowing' ones) go to
        ui.write; otherwise only skip reasons are shown, via ui.warn,
        and only in verbose mode.
        """
        if all_patches:
            write = self.ui.write
        else:
            write = self.ui.warn

        if all_patches or self.ui.verbose:
            if isinstance(idx, bytes):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                # why is None: unguarded; why == '': no negative guard
                # matched; otherwise a specific guard allowed the patch.
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %s\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %s\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
704
704
705 def savedirty(self):
705 def savedirty(self):
706 def writelist(items, path):
706 def writelist(items, path):
707 fp = self.opener(path, 'wb')
707 fp = self.opener(path, 'wb')
708 for i in items:
708 for i in items:
709 fp.write("%s\n" % i)
709 fp.write("%s\n" % i)
710 fp.close()
710 fp.close()
711 if self.applieddirty:
711 if self.applieddirty:
712 writelist(map(bytes, self.applied), self.statuspath)
712 writelist(map(bytes, self.applied), self.statuspath)
713 self.applieddirty = False
713 self.applieddirty = False
714 if self.seriesdirty:
714 if self.seriesdirty:
715 writelist(self.fullseries, self.seriespath)
715 writelist(self.fullseries, self.seriespath)
716 self.seriesdirty = False
716 self.seriesdirty = False
717 if self.guardsdirty:
717 if self.guardsdirty:
718 writelist(self.activeguards, self.guardspath)
718 writelist(self.activeguards, self.guardspath)
719 self.guardsdirty = False
719 self.guardsdirty = False
720 if self.added:
720 if self.added:
721 qrepo = self.qrepo()
721 qrepo = self.qrepo()
722 if qrepo:
722 if qrepo:
723 qrepo[None].add(f for f in self.added if f not in qrepo[None])
723 qrepo[None].add(f for f in self.added if f not in qrepo[None])
724 self.added = []
724 self.added = []
725
725
726 def removeundo(self, repo):
726 def removeundo(self, repo):
727 undo = repo.sjoin('undo')
727 undo = repo.sjoin('undo')
728 if not os.path.exists(undo):
728 if not os.path.exists(undo):
729 return
729 return
730 try:
730 try:
731 os.unlink(undo)
731 os.unlink(undo)
732 except OSError as inst:
732 except OSError as inst:
733 self.ui.warn(_('error removing undo: %s\n') %
733 self.ui.warn(_('error removing undo: %s\n') %
734 stringutil.forcebytestr(inst))
734 stringutil.forcebytestr(inst))
735
735
736 def backup(self, repo, files, copy=False):
736 def backup(self, repo, files, copy=False):
737 # backup local changes in --force case
737 # backup local changes in --force case
738 for f in sorted(files):
738 for f in sorted(files):
739 absf = repo.wjoin(f)
739 absf = repo.wjoin(f)
740 if os.path.lexists(absf):
740 if os.path.lexists(absf):
741 self.ui.note(_('saving current version of %s as %s\n') %
741 self.ui.note(_('saving current version of %s as %s\n') %
742 (f, scmutil.origpath(self.ui, repo, f)))
742 (f, scmutil.origpath(self.ui, repo, f)))
743
743
744 absorig = scmutil.origpath(self.ui, repo, absf)
744 absorig = scmutil.origpath(self.ui, repo, absf)
745 if copy:
745 if copy:
746 util.copyfile(absf, absorig)
746 util.copyfile(absf, absorig)
747 else:
747 else:
748 util.rename(absf, absorig)
748 util.rename(absf, absorig)
749
749
750 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
750 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
751 fp=None, changes=None, opts=None):
751 fp=None, changes=None, opts=None):
752 if opts is None:
752 if opts is None:
753 opts = {}
753 opts = {}
754 stat = opts.get('stat')
754 stat = opts.get('stat')
755 m = scmutil.match(repo[node1], files, opts)
755 m = scmutil.match(repo[node1], files, opts)
756 logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
756 logcmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
757 changes, stat, fp)
757 changes, stat, fp)
758
758
    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        """Apply *patch* on top of *head*; on failure, fall back to
        merging with *rev* and rewrite the patch from the merge result.

        Returns (err, node) like apply().
        """
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise error.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        strip(self.ui, repo, [n], update=False, backup=False)

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise error.Abort(_("update returned %d") % ret)
        # Commit the merge result with the original description/user.
        n = newcommit(repo, None, ctx.description(), ctx.user(), force=True)
        if n is None:
            raise error.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except Exception:
            raise error.Abort(_("unable to read %s") % patch)

        # Rewrite the patch file: original header comments followed by the
        # freshly generated diff from head to the merge commit.
        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = bytes(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)
797
797
798 def qparents(self, repo, rev=None):
798 def qparents(self, repo, rev=None):
799 """return the mq handled parent or p1
799 """return the mq handled parent or p1
800
800
801 In some case where mq get himself in being the parent of a merge the
801 In some case where mq get himself in being the parent of a merge the
802 appropriate parent may be p2.
802 appropriate parent may be p2.
803 (eg: an in progress merge started with mq disabled)
803 (eg: an in progress merge started with mq disabled)
804
804
805 If no parent are managed by mq, p1 is returned.
805 If no parent are managed by mq, p1 is returned.
806 """
806 """
807 if rev is None:
807 if rev is None:
808 (p1, p2) = repo.dirstate.parents()
808 (p1, p2) = repo.dirstate.parents()
809 if p2 == nullid:
809 if p2 == nullid:
810 return p1
810 return p1
811 if not self.applied:
811 if not self.applied:
812 return None
812 return None
813 return self.applied[-1].node
813 return self.applied[-1].node
814 p1, p2 = repo.changelog.parents(rev)
814 p1, p2 = repo.changelog.parents(rev)
815 if p2 != nullid and p2 in [x.node for x in self.applied]:
815 if p2 != nullid and p2 in [x.node for x in self.applied]:
816 return p2
816 return p2
817 return p1
817 return p1
818
818
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Merge each pushable patch of *series* from *mergeq* into this
        queue, via mergeone(). Returns (err, head)."""
        if not self.applied:
            # each of the patches merged in will have two parents.  This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent.  This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = newcommit(repo, None, '[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applieddirty = True

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                # Guarded patches are explained and skipped, not errors.
                self.explainpushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applieddirty = True
            if err:
                return (err, head)
        self.savedirty()
        return (0, head)
857
857
858 def patch(self, repo, patchfile):
858 def patch(self, repo, patchfile):
859 '''Apply patchfile to the working directory.
859 '''Apply patchfile to the working directory.
860 patchfile: name of patch file'''
860 patchfile: name of patch file'''
861 files = set()
861 files = set()
862 try:
862 try:
863 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
863 fuzz = patchmod.patch(self.ui, repo, patchfile, strip=1,
864 files=files, eolmode=None)
864 files=files, eolmode=None)
865 return (True, list(files), fuzz)
865 return (True, list(files), fuzz)
866 except Exception as inst:
866 except Exception as inst:
867 self.ui.note(stringutil.forcebytestr(inst) + '\n')
867 self.ui.note(stringutil.forcebytestr(inst) + '\n')
868 if not self.ui.verbose:
868 if not self.ui.verbose:
869 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
869 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
870 self.ui.traceback()
870 self.ui.traceback()
871 return (False, list(files), False)
871 return (False, list(files), False)
872
872
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None,
              tobackup=None, keepchanges=False):
        """Apply the patches in *series* under wlock/lock and a 'qpush'
        transaction, delegating to _apply(); see _apply for the meaning
        of the return value and keyword arguments."""
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files,
                                  tobackup=tobackup, keepchanges=keepchanges)
                tr.close()
                self.savedirty()
                return ret
            except AbortNoCleanup:
                # Deliberately keep the partial result: commit the
                # transaction and persist dirty state before re-raising.
                tr.close()
                self.savedirty()
                raise
            except: # re-raises
                try:
                    tr.abort()
                finally:
                    # In-memory state no longer matches disk after abort.
                    self.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
901
901
902 def _apply(self, repo, series, list=False, update_status=True,
902 def _apply(self, repo, series, list=False, update_status=True,
903 strict=False, patchdir=None, merge=None, all_files=None,
903 strict=False, patchdir=None, merge=None, all_files=None,
904 tobackup=None, keepchanges=False):
904 tobackup=None, keepchanges=False):
905 """returns (error, hash)
905 """returns (error, hash)
906
906
907 error = 1 for unable to read, 2 for patch failed, 3 for patch
907 error = 1 for unable to read, 2 for patch failed, 3 for patch
908 fuzz. tobackup is None or a set of files to backup before they
908 fuzz. tobackup is None or a set of files to backup before they
909 are modified by a patch.
909 are modified by a patch.
910 """
910 """
911 # TODO unify with commands.py
911 # TODO unify with commands.py
912 if not patchdir:
912 if not patchdir:
913 patchdir = self.path
913 patchdir = self.path
914 err = 0
914 err = 0
915 n = None
915 n = None
916 for patchname in series:
916 for patchname in series:
917 pushable, reason = self.pushable(patchname)
917 pushable, reason = self.pushable(patchname)
918 if not pushable:
918 if not pushable:
919 self.explainpushable(patchname, all_patches=True)
919 self.explainpushable(patchname, all_patches=True)
920 continue
920 continue
921 self.ui.status(_("applying %s\n") % patchname)
921 self.ui.status(_("applying %s\n") % patchname)
922 pf = os.path.join(patchdir, patchname)
922 pf = os.path.join(patchdir, patchname)
923
923
924 try:
924 try:
925 ph = patchheader(self.join(patchname), self.plainmode)
925 ph = patchheader(self.join(patchname), self.plainmode)
926 except IOError:
926 except IOError:
927 self.ui.warn(_("unable to read %s\n") % patchname)
927 self.ui.warn(_("unable to read %s\n") % patchname)
928 err = 1
928 err = 1
929 break
929 break
930
930
931 message = ph.message
931 message = ph.message
932 if not message:
932 if not message:
933 # The commit message should not be translated
933 # The commit message should not be translated
934 message = "imported patch %s\n" % patchname
934 message = "imported patch %s\n" % patchname
935 else:
935 else:
936 if list:
936 if list:
937 # The commit message should not be translated
937 # The commit message should not be translated
938 message.append("\nimported patch %s" % patchname)
938 message.append("\nimported patch %s" % patchname)
939 message = '\n'.join(message)
939 message = '\n'.join(message)
940
940
941 if ph.haspatch:
941 if ph.haspatch:
942 if tobackup:
942 if tobackup:
943 touched = patchmod.changedfiles(self.ui, repo, pf)
943 touched = patchmod.changedfiles(self.ui, repo, pf)
944 touched = set(touched) & tobackup
944 touched = set(touched) & tobackup
945 if touched and keepchanges:
945 if touched and keepchanges:
946 raise AbortNoCleanup(
946 raise AbortNoCleanup(
947 _("conflicting local changes found"),
947 _("conflicting local changes found"),
948 hint=_("did you forget to qrefresh?"))
948 hint=_("did you forget to qrefresh?"))
949 self.backup(repo, touched, copy=True)
949 self.backup(repo, touched, copy=True)
950 tobackup = tobackup - touched
950 tobackup = tobackup - touched
951 (patcherr, files, fuzz) = self.patch(repo, pf)
951 (patcherr, files, fuzz) = self.patch(repo, pf)
952 if all_files is not None:
952 if all_files is not None:
953 all_files.update(files)
953 all_files.update(files)
954 patcherr = not patcherr
954 patcherr = not patcherr
955 else:
955 else:
956 self.ui.warn(_("patch %s is empty\n") % patchname)
956 self.ui.warn(_("patch %s is empty\n") % patchname)
957 patcherr, files, fuzz = 0, [], 0
957 patcherr, files, fuzz = 0, [], 0
958
958
959 if merge and files:
959 if merge and files:
960 # Mark as removed/merged and update dirstate parent info
960 # Mark as removed/merged and update dirstate parent info
961 removed = []
961 removed = []
962 merged = []
962 merged = []
963 for f in files:
963 for f in files:
964 if os.path.lexists(repo.wjoin(f)):
964 if os.path.lexists(repo.wjoin(f)):
965 merged.append(f)
965 merged.append(f)
966 else:
966 else:
967 removed.append(f)
967 removed.append(f)
968 with repo.dirstate.parentchange():
968 with repo.dirstate.parentchange():
969 for f in removed:
969 for f in removed:
970 repo.dirstate.remove(f)
970 repo.dirstate.remove(f)
971 for f in merged:
971 for f in merged:
972 repo.dirstate.merge(f)
972 repo.dirstate.merge(f)
973 p1 = repo.dirstate.p1()
973 p1 = repo.dirstate.p1()
974 repo.setparents(p1, merge)
974 repo.setparents(p1, merge)
975
975
976 if all_files and '.hgsubstate' in all_files:
976 if all_files and '.hgsubstate' in all_files:
977 wctx = repo[None]
977 wctx = repo[None]
978 pctx = repo['.']
978 pctx = repo['.']
979 overwrite = False
979 overwrite = False
980 mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
980 mergedsubstate = subrepoutil.submerge(repo, pctx, wctx, wctx,
981 overwrite)
981 overwrite)
982 files += mergedsubstate.keys()
982 files += mergedsubstate.keys()
983
983
984 match = scmutil.matchfiles(repo, files or [])
984 match = scmutil.matchfiles(repo, files or [])
985 oldtip = repo.changelog.tip()
985 oldtip = repo.changelog.tip()
986 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
986 n = newcommit(repo, None, message, ph.user, ph.date, match=match,
987 force=True)
987 force=True)
988 if repo.changelog.tip() == oldtip:
988 if repo.changelog.tip() == oldtip:
989 raise error.Abort(_("qpush exactly duplicates child changeset"))
989 raise error.Abort(_("qpush exactly duplicates child changeset"))
990 if n is None:
990 if n is None:
991 raise error.Abort(_("repository commit failed"))
991 raise error.Abort(_("repository commit failed"))
992
992
993 if update_status:
993 if update_status:
994 self.applied.append(statusentry(n, patchname))
994 self.applied.append(statusentry(n, patchname))
995
995
996 if patcherr:
996 if patcherr:
997 self.ui.warn(_("patch failed, rejects left in working "
997 self.ui.warn(_("patch failed, rejects left in working "
998 "directory\n"))
998 "directory\n"))
999 err = 2
999 err = 2
1000 break
1000 break
1001
1001
1002 if fuzz and strict:
1002 if fuzz and strict:
1003 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
1003 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
1004 err = 3
1004 err = 3
1005 break
1005 break
1006 return (err, n)
1006 return (err, n)
1007
1007
1008 def _cleanup(self, patches, numrevs, keep=False):
1008 def _cleanup(self, patches, numrevs, keep=False):
1009 if not keep:
1009 if not keep:
1010 r = self.qrepo()
1010 r = self.qrepo()
1011 if r:
1011 if r:
1012 r[None].forget(patches)
1012 r[None].forget(patches)
1013 for p in patches:
1013 for p in patches:
1014 try:
1014 try:
1015 os.unlink(self.join(p))
1015 os.unlink(self.join(p))
1016 except OSError as inst:
1016 except OSError as inst:
1017 if inst.errno != errno.ENOENT:
1017 if inst.errno != errno.ENOENT:
1018 raise
1018 raise
1019
1019
1020 qfinished = []
1020 qfinished = []
1021 if numrevs:
1021 if numrevs:
1022 qfinished = self.applied[:numrevs]
1022 qfinished = self.applied[:numrevs]
1023 del self.applied[:numrevs]
1023 del self.applied[:numrevs]
1024 self.applieddirty = True
1024 self.applieddirty = True
1025
1025
1026 unknown = []
1026 unknown = []
1027
1027
1028 sortedseries = []
1028 sortedseries = []
1029 for p in patches:
1029 for p in patches:
1030 idx = self.findseries(p)
1030 idx = self.findseries(p)
1031 if idx is None:
1031 if idx is None:
1032 sortedseries.append((-1, p))
1032 sortedseries.append((-1, p))
1033 else:
1033 else:
1034 sortedseries.append((idx, p))
1034 sortedseries.append((idx, p))
1035
1035
1036 sortedseries.sort(reverse=True)
1036 sortedseries.sort(reverse=True)
1037 for (i, p) in sortedseries:
1037 for (i, p) in sortedseries:
1038 if i != -1:
1038 if i != -1:
1039 del self.fullseries[i]
1039 del self.fullseries[i]
1040 else:
1040 else:
1041 unknown.append(p)
1041 unknown.append(p)
1042
1042
1043 if unknown:
1043 if unknown:
1044 if numrevs:
1044 if numrevs:
1045 rev = dict((entry.name, entry.node) for entry in qfinished)
1045 rev = dict((entry.name, entry.node) for entry in qfinished)
1046 for p in unknown:
1046 for p in unknown:
1047 msg = _('revision %s refers to unknown patches: %s\n')
1047 msg = _('revision %s refers to unknown patches: %s\n')
1048 self.ui.warn(msg % (short(rev[p]), p))
1048 self.ui.warn(msg % (short(rev[p]), p))
1049 else:
1049 else:
1050 msg = _('unknown patches: %s\n')
1050 msg = _('unknown patches: %s\n')
1051 raise error.Abort(''.join(msg % p for p in unknown))
1051 raise error.Abort(''.join(msg % p for p in unknown))
1052
1052
1053 self.parseseries()
1053 self.parseseries()
1054 self.seriesdirty = True
1054 self.seriesdirty = True
1055 return [entry.node for entry in qfinished]
1055 return [entry.node for entry in qfinished]
1056
1056
1057 def _revpatches(self, repo, revs):
1057 def _revpatches(self, repo, revs):
1058 firstrev = repo[self.applied[0].node].rev()
1058 firstrev = repo[self.applied[0].node].rev()
1059 patches = []
1059 patches = []
1060 for i, rev in enumerate(revs):
1060 for i, rev in enumerate(revs):
1061
1061
1062 if rev < firstrev:
1062 if rev < firstrev:
1063 raise error.Abort(_('revision %d is not managed') % rev)
1063 raise error.Abort(_('revision %d is not managed') % rev)
1064
1064
1065 ctx = repo[rev]
1065 ctx = repo[rev]
1066 base = self.applied[i].node
1066 base = self.applied[i].node
1067 if ctx.node() != base:
1067 if ctx.node() != base:
1068 msg = _('cannot delete revision %d above applied patches')
1068 msg = _('cannot delete revision %d above applied patches')
1069 raise error.Abort(msg % rev)
1069 raise error.Abort(msg % rev)
1070
1070
1071 patch = self.applied[i].name
1071 patch = self.applied[i].name
1072 for fmt in ('[mq]: %s', 'imported patch %s'):
1072 for fmt in ('[mq]: %s', 'imported patch %s'):
1073 if ctx.description() == fmt % patch:
1073 if ctx.description() == fmt % patch:
1074 msg = _('patch %s finalized without changeset message\n')
1074 msg = _('patch %s finalized without changeset message\n')
1075 repo.ui.status(msg % patch)
1075 repo.ui.status(msg % patch)
1076 break
1076 break
1077
1077
1078 patches.append(patch)
1078 patches.append(patch)
1079 return patches
1079 return patches
1080
1080
1081 def finish(self, repo, revs):
1081 def finish(self, repo, revs):
1082 # Manually trigger phase computation to ensure phasedefaults is
1082 # Manually trigger phase computation to ensure phasedefaults is
1083 # executed before we remove the patches.
1083 # executed before we remove the patches.
1084 repo._phasecache
1084 repo._phasecache
1085 patches = self._revpatches(repo, sorted(revs))
1085 patches = self._revpatches(repo, sorted(revs))
1086 qfinished = self._cleanup(patches, len(patches))
1086 qfinished = self._cleanup(patches, len(patches))
1087 if qfinished and repo.ui.configbool('mq', 'secret'):
1087 if qfinished and repo.ui.configbool('mq', 'secret'):
1088 # only use this logic when the secret option is added
1088 # only use this logic when the secret option is added
1089 oldqbase = repo[qfinished[0]]
1089 oldqbase = repo[qfinished[0]]
1090 tphase = phases.newcommitphase(repo.ui)
1090 tphase = phases.newcommitphase(repo.ui)
1091 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
1091 if oldqbase.phase() > tphase and oldqbase.p1().phase() <= tphase:
1092 with repo.transaction('qfinish') as tr:
1092 with repo.transaction('qfinish') as tr:
1093 phases.advanceboundary(repo, tr, tphase, qfinished)
1093 phases.advanceboundary(repo, tr, tphase, qfinished)
1094
1094
1095 def delete(self, repo, patches, opts):
1095 def delete(self, repo, patches, opts):
1096 if not patches and not opts.get('rev'):
1096 if not patches and not opts.get('rev'):
1097 raise error.Abort(_('qdelete requires at least one revision or '
1097 raise error.Abort(_('qdelete requires at least one revision or '
1098 'patch name'))
1098 'patch name'))
1099
1099
1100 realpatches = []
1100 realpatches = []
1101 for patch in patches:
1101 for patch in patches:
1102 patch = self.lookup(patch, strict=True)
1102 patch = self.lookup(patch, strict=True)
1103 info = self.isapplied(patch)
1103 info = self.isapplied(patch)
1104 if info:
1104 if info:
1105 raise error.Abort(_("cannot delete applied patch %s") % patch)
1105 raise error.Abort(_("cannot delete applied patch %s") % patch)
1106 if patch not in self.series:
1106 if patch not in self.series:
1107 raise error.Abort(_("patch %s not in series file") % patch)
1107 raise error.Abort(_("patch %s not in series file") % patch)
1108 if patch not in realpatches:
1108 if patch not in realpatches:
1109 realpatches.append(patch)
1109 realpatches.append(patch)
1110
1110
1111 numrevs = 0
1111 numrevs = 0
1112 if opts.get('rev'):
1112 if opts.get('rev'):
1113 if not self.applied:
1113 if not self.applied:
1114 raise error.Abort(_('no patches applied'))
1114 raise error.Abort(_('no patches applied'))
1115 revs = scmutil.revrange(repo, opts.get('rev'))
1115 revs = scmutil.revrange(repo, opts.get('rev'))
1116 revs.sort()
1116 revs.sort()
1117 revpatches = self._revpatches(repo, revs)
1117 revpatches = self._revpatches(repo, revs)
1118 realpatches += revpatches
1118 realpatches += revpatches
1119 numrevs = len(revpatches)
1119 numrevs = len(revpatches)
1120
1120
1121 self._cleanup(realpatches, numrevs, opts.get('keep'))
1121 self._cleanup(realpatches, numrevs, opts.get('keep'))
1122
1122
1123 def checktoppatch(self, repo):
1123 def checktoppatch(self, repo):
1124 '''check that working directory is at qtip'''
1124 '''check that working directory is at qtip'''
1125 if self.applied:
1125 if self.applied:
1126 top = self.applied[-1].node
1126 top = self.applied[-1].node
1127 patch = self.applied[-1].name
1127 patch = self.applied[-1].name
1128 if repo.dirstate.p1() != top:
1128 if repo.dirstate.p1() != top:
1129 raise error.Abort(_("working directory revision is not qtip"))
1129 raise error.Abort(_("working directory revision is not qtip"))
1130 return top, patch
1130 return top, patch
1131 return None, None
1131 return None, None
1132
1132
1133 def putsubstate2changes(self, substatestate, changes):
1133 def putsubstate2changes(self, substatestate, changes):
1134 for files in changes[:3]:
1134 for files in changes[:3]:
1135 if '.hgsubstate' in files:
1135 if '.hgsubstate' in files:
1136 return # already listed up
1136 return # already listed up
1137 # not yet listed up
1137 # not yet listed up
1138 if substatestate in 'a?':
1138 if substatestate in 'a?':
1139 changes[1].append('.hgsubstate')
1139 changes[1].append('.hgsubstate')
1140 elif substatestate in 'r':
1140 elif substatestate in 'r':
1141 changes[2].append('.hgsubstate')
1141 changes[2].append('.hgsubstate')
1142 else: # modified
1142 else: # modified
1143 changes[0].append('.hgsubstate')
1143 changes[0].append('.hgsubstate')
1144
1144
1145 def checklocalchanges(self, repo, force=False, refresh=True):
1145 def checklocalchanges(self, repo, force=False, refresh=True):
1146 excsuffix = ''
1146 excsuffix = ''
1147 if refresh:
1147 if refresh:
1148 excsuffix = ', qrefresh first'
1148 excsuffix = ', qrefresh first'
1149 # plain versions for i18n tool to detect them
1149 # plain versions for i18n tool to detect them
1150 _("local changes found, qrefresh first")
1150 _("local changes found, qrefresh first")
1151 _("local changed subrepos found, qrefresh first")
1151 _("local changed subrepos found, qrefresh first")
1152 return checklocalchanges(repo, force, excsuffix)
1152 return checklocalchanges(repo, force, excsuffix)
1153
1153
1154 _reserved = ('series', 'status', 'guards', '.', '..')
1154 _reserved = ('series', 'status', 'guards', '.', '..')
1155 def checkreservedname(self, name):
1155 def checkreservedname(self, name):
1156 if name in self._reserved:
1156 if name in self._reserved:
1157 raise error.Abort(_('"%s" cannot be used as the name of a patch')
1157 raise error.Abort(_('"%s" cannot be used as the name of a patch')
1158 % name)
1158 % name)
1159 if name != name.strip():
1159 if name != name.strip():
1160 # whitespace is stripped by parseseries()
1160 # whitespace is stripped by parseseries()
1161 raise error.Abort(_('patch name cannot begin or end with '
1161 raise error.Abort(_('patch name cannot begin or end with '
1162 'whitespace'))
1162 'whitespace'))
1163 for prefix in ('.hg', '.mq'):
1163 for prefix in ('.hg', '.mq'):
1164 if name.startswith(prefix):
1164 if name.startswith(prefix):
1165 raise error.Abort(_('patch name cannot begin with "%s"')
1165 raise error.Abort(_('patch name cannot begin with "%s"')
1166 % prefix)
1166 % prefix)
1167 for c in ('#', ':', '\r', '\n'):
1167 for c in ('#', ':', '\r', '\n'):
1168 if c in name:
1168 if c in name:
1169 raise error.Abort(_('%r cannot be used in the name of a patch')
1169 raise error.Abort(_('%r cannot be used in the name of a patch')
1170 % pycompat.bytestr(c))
1170 % pycompat.bytestr(c))
1171
1171
1172 def checkpatchname(self, name, force=False):
1172 def checkpatchname(self, name, force=False):
1173 self.checkreservedname(name)
1173 self.checkreservedname(name)
1174 if not force and os.path.exists(self.join(name)):
1174 if not force and os.path.exists(self.join(name)):
1175 if os.path.isdir(self.join(name)):
1175 if os.path.isdir(self.join(name)):
1176 raise error.Abort(_('"%s" already exists as a directory')
1176 raise error.Abort(_('"%s" already exists as a directory')
1177 % name)
1177 % name)
1178 else:
1178 else:
1179 raise error.Abort(_('patch "%s" already exists') % name)
1179 raise error.Abort(_('patch "%s" already exists') % name)
1180
1180
1181 def makepatchname(self, title, fallbackname):
1181 def makepatchname(self, title, fallbackname):
1182 """Return a suitable filename for title, adding a suffix to make
1182 """Return a suitable filename for title, adding a suffix to make
1183 it unique in the existing list"""
1183 it unique in the existing list"""
1184 namebase = re.sub('[\s\W_]+', '_', title.lower()).strip('_')
1184 namebase = re.sub(br'[\s\W_]+', b'_', title.lower()).strip(b'_')
1185 namebase = namebase[:75] # avoid too long name (issue5117)
1185 namebase = namebase[:75] # avoid too long name (issue5117)
1186 if namebase:
1186 if namebase:
1187 try:
1187 try:
1188 self.checkreservedname(namebase)
1188 self.checkreservedname(namebase)
1189 except error.Abort:
1189 except error.Abort:
1190 namebase = fallbackname
1190 namebase = fallbackname
1191 else:
1191 else:
1192 namebase = fallbackname
1192 namebase = fallbackname
1193 name = namebase
1193 name = namebase
1194 i = 0
1194 i = 0
1195 while True:
1195 while True:
1196 if name not in self.fullseries:
1196 if name not in self.fullseries:
1197 try:
1197 try:
1198 self.checkpatchname(name)
1198 self.checkpatchname(name)
1199 break
1199 break
1200 except error.Abort:
1200 except error.Abort:
1201 pass
1201 pass
1202 i += 1
1202 i += 1
1203 name = '%s__%d' % (namebase, i)
1203 name = '%s__%d' % (namebase, i)
1204 return name
1204 return name
1205
1205
1206 def checkkeepchanges(self, keepchanges, force):
1206 def checkkeepchanges(self, keepchanges, force):
1207 if force and keepchanges:
1207 if force and keepchanges:
1208 raise error.Abort(_('cannot use both --force and --keep-changes'))
1208 raise error.Abort(_('cannot use both --force and --keep-changes'))
1209
1209
1210 def new(self, repo, patchfn, *pats, **opts):
1210 def new(self, repo, patchfn, *pats, **opts):
1211 """options:
1211 """options:
1212 msg: a string or a no-argument function returning a string
1212 msg: a string or a no-argument function returning a string
1213 """
1213 """
1214 opts = pycompat.byteskwargs(opts)
1214 opts = pycompat.byteskwargs(opts)
1215 msg = opts.get('msg')
1215 msg = opts.get('msg')
1216 edit = opts.get('edit')
1216 edit = opts.get('edit')
1217 editform = opts.get('editform', 'mq.qnew')
1217 editform = opts.get('editform', 'mq.qnew')
1218 user = opts.get('user')
1218 user = opts.get('user')
1219 date = opts.get('date')
1219 date = opts.get('date')
1220 if date:
1220 if date:
1221 date = dateutil.parsedate(date)
1221 date = dateutil.parsedate(date)
1222 diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
1222 diffopts = self.diffopts({'git': opts.get('git')}, plain=True)
1223 if opts.get('checkname', True):
1223 if opts.get('checkname', True):
1224 self.checkpatchname(patchfn)
1224 self.checkpatchname(patchfn)
1225 inclsubs = checksubstate(repo)
1225 inclsubs = checksubstate(repo)
1226 if inclsubs:
1226 if inclsubs:
1227 substatestate = repo.dirstate['.hgsubstate']
1227 substatestate = repo.dirstate['.hgsubstate']
1228 if opts.get('include') or opts.get('exclude') or pats:
1228 if opts.get('include') or opts.get('exclude') or pats:
1229 # detect missing files in pats
1229 # detect missing files in pats
1230 def badfn(f, msg):
1230 def badfn(f, msg):
1231 if f != '.hgsubstate': # .hgsubstate is auto-created
1231 if f != '.hgsubstate': # .hgsubstate is auto-created
1232 raise error.Abort('%s: %s' % (f, msg))
1232 raise error.Abort('%s: %s' % (f, msg))
1233 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1233 match = scmutil.match(repo[None], pats, opts, badfn=badfn)
1234 changes = repo.status(match=match)
1234 changes = repo.status(match=match)
1235 else:
1235 else:
1236 changes = self.checklocalchanges(repo, force=True)
1236 changes = self.checklocalchanges(repo, force=True)
1237 commitfiles = list(inclsubs)
1237 commitfiles = list(inclsubs)
1238 for files in changes[:3]:
1238 for files in changes[:3]:
1239 commitfiles.extend(files)
1239 commitfiles.extend(files)
1240 match = scmutil.matchfiles(repo, commitfiles)
1240 match = scmutil.matchfiles(repo, commitfiles)
1241 if len(repo[None].parents()) > 1:
1241 if len(repo[None].parents()) > 1:
1242 raise error.Abort(_('cannot manage merge changesets'))
1242 raise error.Abort(_('cannot manage merge changesets'))
1243 self.checktoppatch(repo)
1243 self.checktoppatch(repo)
1244 insert = self.fullseriesend()
1244 insert = self.fullseriesend()
1245 with repo.wlock():
1245 with repo.wlock():
1246 try:
1246 try:
1247 # if patch file write fails, abort early
1247 # if patch file write fails, abort early
1248 p = self.opener(patchfn, "w")
1248 p = self.opener(patchfn, "w")
1249 except IOError as e:
1249 except IOError as e:
1250 raise error.Abort(_('cannot write patch "%s": %s')
1250 raise error.Abort(_('cannot write patch "%s": %s')
1251 % (patchfn, encoding.strtolocal(e.strerror)))
1251 % (patchfn, encoding.strtolocal(e.strerror)))
1252 try:
1252 try:
1253 defaultmsg = "[mq]: %s" % patchfn
1253 defaultmsg = "[mq]: %s" % patchfn
1254 editor = cmdutil.getcommiteditor(editform=editform)
1254 editor = cmdutil.getcommiteditor(editform=editform)
1255 if edit:
1255 if edit:
1256 def finishdesc(desc):
1256 def finishdesc(desc):
1257 if desc.rstrip():
1257 if desc.rstrip():
1258 return desc
1258 return desc
1259 else:
1259 else:
1260 return defaultmsg
1260 return defaultmsg
1261 # i18n: this message is shown in editor with "HG: " prefix
1261 # i18n: this message is shown in editor with "HG: " prefix
1262 extramsg = _('Leave message empty to use default message.')
1262 extramsg = _('Leave message empty to use default message.')
1263 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1263 editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
1264 extramsg=extramsg,
1264 extramsg=extramsg,
1265 editform=editform)
1265 editform=editform)
1266 commitmsg = msg
1266 commitmsg = msg
1267 else:
1267 else:
1268 commitmsg = msg or defaultmsg
1268 commitmsg = msg or defaultmsg
1269
1269
1270 n = newcommit(repo, None, commitmsg, user, date, match=match,
1270 n = newcommit(repo, None, commitmsg, user, date, match=match,
1271 force=True, editor=editor)
1271 force=True, editor=editor)
1272 if n is None:
1272 if n is None:
1273 raise error.Abort(_("repo commit failed"))
1273 raise error.Abort(_("repo commit failed"))
1274 try:
1274 try:
1275 self.fullseries[insert:insert] = [patchfn]
1275 self.fullseries[insert:insert] = [patchfn]
1276 self.applied.append(statusentry(n, patchfn))
1276 self.applied.append(statusentry(n, patchfn))
1277 self.parseseries()
1277 self.parseseries()
1278 self.seriesdirty = True
1278 self.seriesdirty = True
1279 self.applieddirty = True
1279 self.applieddirty = True
1280 nctx = repo[n]
1280 nctx = repo[n]
1281 ph = patchheader(self.join(patchfn), self.plainmode)
1281 ph = patchheader(self.join(patchfn), self.plainmode)
1282 if user:
1282 if user:
1283 ph.setuser(user)
1283 ph.setuser(user)
1284 if date:
1284 if date:
1285 ph.setdate('%d %d' % date)
1285 ph.setdate('%d %d' % date)
1286 ph.setparent(hex(nctx.p1().node()))
1286 ph.setparent(hex(nctx.p1().node()))
1287 msg = nctx.description().strip()
1287 msg = nctx.description().strip()
1288 if msg == defaultmsg.strip():
1288 if msg == defaultmsg.strip():
1289 msg = ''
1289 msg = ''
1290 ph.setmessage(msg)
1290 ph.setmessage(msg)
1291 p.write(bytes(ph))
1291 p.write(bytes(ph))
1292 if commitfiles:
1292 if commitfiles:
1293 parent = self.qparents(repo, n)
1293 parent = self.qparents(repo, n)
1294 if inclsubs:
1294 if inclsubs:
1295 self.putsubstate2changes(substatestate, changes)
1295 self.putsubstate2changes(substatestate, changes)
1296 chunks = patchmod.diff(repo, node1=parent, node2=n,
1296 chunks = patchmod.diff(repo, node1=parent, node2=n,
1297 changes=changes, opts=diffopts)
1297 changes=changes, opts=diffopts)
1298 for chunk in chunks:
1298 for chunk in chunks:
1299 p.write(chunk)
1299 p.write(chunk)
1300 p.close()
1300 p.close()
1301 r = self.qrepo()
1301 r = self.qrepo()
1302 if r:
1302 if r:
1303 r[None].add([patchfn])
1303 r[None].add([patchfn])
1304 except: # re-raises
1304 except: # re-raises
1305 repo.rollback()
1305 repo.rollback()
1306 raise
1306 raise
1307 except Exception:
1307 except Exception:
1308 patchpath = self.join(patchfn)
1308 patchpath = self.join(patchfn)
1309 try:
1309 try:
1310 os.unlink(patchpath)
1310 os.unlink(patchpath)
1311 except OSError:
1311 except OSError:
1312 self.ui.warn(_('error unlinking %s\n') % patchpath)
1312 self.ui.warn(_('error unlinking %s\n') % patchpath)
1313 raise
1313 raise
1314 self.removeundo(repo)
1314 self.removeundo(repo)
1315
1315
1316 def isapplied(self, patch):
1316 def isapplied(self, patch):
1317 """returns (index, rev, patch)"""
1317 """returns (index, rev, patch)"""
1318 for i, a in enumerate(self.applied):
1318 for i, a in enumerate(self.applied):
1319 if a.name == patch:
1319 if a.name == patch:
1320 return (i, a.node, a.name)
1320 return (i, a.node, a.name)
1321 return None
1321 return None
1322
1322
1323 # if the exact patch name does not exist, we try a few
1323 # if the exact patch name does not exist, we try a few
1324 # variations. If strict is passed, we try only #1
1324 # variations. If strict is passed, we try only #1
1325 #
1325 #
1326 # 1) a number (as string) to indicate an offset in the series file
1326 # 1) a number (as string) to indicate an offset in the series file
1327 # 2) a unique substring of the patch name was given
1327 # 2) a unique substring of the patch name was given
1328 # 3) patchname[-+]num to indicate an offset in the series file
1328 # 3) patchname[-+]num to indicate an offset in the series file
1329 def lookup(self, patch, strict=False):
1329 def lookup(self, patch, strict=False):
1330 def partialname(s):
1330 def partialname(s):
1331 if s in self.series:
1331 if s in self.series:
1332 return s
1332 return s
1333 matches = [x for x in self.series if s in x]
1333 matches = [x for x in self.series if s in x]
1334 if len(matches) > 1:
1334 if len(matches) > 1:
1335 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1335 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
1336 for m in matches:
1336 for m in matches:
1337 self.ui.warn(' %s\n' % m)
1337 self.ui.warn(' %s\n' % m)
1338 return None
1338 return None
1339 if matches:
1339 if matches:
1340 return matches[0]
1340 return matches[0]
1341 if self.series and self.applied:
1341 if self.series and self.applied:
1342 if s == 'qtip':
1342 if s == 'qtip':
1343 return self.series[self.seriesend(True) - 1]
1343 return self.series[self.seriesend(True) - 1]
1344 if s == 'qbase':
1344 if s == 'qbase':
1345 return self.series[0]
1345 return self.series[0]
1346 return None
1346 return None
1347
1347
1348 if patch in self.series:
1348 if patch in self.series:
1349 return patch
1349 return patch
1350
1350
1351 if not os.path.isfile(self.join(patch)):
1351 if not os.path.isfile(self.join(patch)):
1352 try:
1352 try:
1353 sno = int(patch)
1353 sno = int(patch)
1354 except (ValueError, OverflowError):
1354 except (ValueError, OverflowError):
1355 pass
1355 pass
1356 else:
1356 else:
1357 if -len(self.series) <= sno < len(self.series):
1357 if -len(self.series) <= sno < len(self.series):
1358 return self.series[sno]
1358 return self.series[sno]
1359
1359
1360 if not strict:
1360 if not strict:
1361 res = partialname(patch)
1361 res = partialname(patch)
1362 if res:
1362 if res:
1363 return res
1363 return res
1364 minus = patch.rfind('-')
1364 minus = patch.rfind('-')
1365 if minus >= 0:
1365 if minus >= 0:
1366 res = partialname(patch[:minus])
1366 res = partialname(patch[:minus])
1367 if res:
1367 if res:
1368 i = self.series.index(res)
1368 i = self.series.index(res)
1369 try:
1369 try:
1370 off = int(patch[minus + 1:] or 1)
1370 off = int(patch[minus + 1:] or 1)
1371 except (ValueError, OverflowError):
1371 except (ValueError, OverflowError):
1372 pass
1372 pass
1373 else:
1373 else:
1374 if i - off >= 0:
1374 if i - off >= 0:
1375 return self.series[i - off]
1375 return self.series[i - off]
1376 plus = patch.rfind('+')
1376 plus = patch.rfind('+')
1377 if plus >= 0:
1377 if plus >= 0:
1378 res = partialname(patch[:plus])
1378 res = partialname(patch[:plus])
1379 if res:
1379 if res:
1380 i = self.series.index(res)
1380 i = self.series.index(res)
1381 try:
1381 try:
1382 off = int(patch[plus + 1:] or 1)
1382 off = int(patch[plus + 1:] or 1)
1383 except (ValueError, OverflowError):
1383 except (ValueError, OverflowError):
1384 pass
1384 pass
1385 else:
1385 else:
1386 if i + off < len(self.series):
1386 if i + off < len(self.series):
1387 return self.series[i + off]
1387 return self.series[i + off]
1388 raise error.Abort(_("patch %s not in series") % patch)
1388 raise error.Abort(_("patch %s not in series") % patch)
1389
1389
    def push(self, repo, patch=None, force=False, list=False, mergeq=None,
             all=False, move=False, exact=False, nobackup=False,
             keepchanges=False):
        """Apply (push) one or more patches from the series onto the repo.

        patch: name of the patch to push up to (default: next in series).
        force: apply over local changes; list: passed through to apply()
        for per-file listing; mergeq: alternate queue to merge patches from;
        all: push every remaining patch; move: reorder *patch* to the front
        of the unapplied part of the series before pushing; exact: update to
        the patch's recorded parent first; nobackup/keepchanges: control
        backup of touched files.

        Returns 0 on success/no-op, 1 on error (also via apply()'s return).
        Raises error.Abort for invalid combinations or unpushable patches.
        """
        self.checkkeepchanges(keepchanges, force)
        diffopts = self.diffopts()
        with repo.wlock():
            # collect every branch head; warn if the working dir parent is
            # not one of them (pushing from a non-head is usually a mistake)
            heads = []
            for hs in repo.branchmap().itervalues():
                heads.extend(hs)
            if not heads:
                heads = [nullid]
            if repo.dirstate.p1() not in heads and not exact:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                patch = self.lookup(patch)
                info = self.isapplied(patch)
                if info and info[0] >= len(self.applied) - 1:
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0

                pushable, reason = self.pushable(patch)
                if pushable:
                    if self.series.index(patch) < self.seriesend():
                        raise error.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                else:
                    if reason:
                        reason = _('guarded by %s') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.seriesend()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force and not keepchanges:
                self.checklocalchanges(repo, refresh=self.applied)

            if exact:
                # --exact needs a clean slate: no kept changes, no moves,
                # no applied patches; update to the recorded parent first
                if keepchanges:
                    raise error.Abort(
                        _("cannot use --exact and --keep-changes together"))
                if move:
                    raise error.Abort(_('cannot use --exact and --move '
                                        'together'))
                if self.applied:
                    raise error.Abort(_('cannot push --exact with applied '
                                        'patches'))
                root = self.series[start]
                target = patchheader(self.join(root), self.plainmode).parent
                if not target:
                    raise error.Abort(
                        _("%s does not have a parent recorded") % root)
                if not repo[target] == repo['.']:
                    hg.update(repo, target)

            if move:
                # relocate *patch* in fullseries so it becomes the next
                # patch to be applied, then reparse the series
                if not patch:
                    raise error.Abort(_("please specify the patch to move"))
                for fullstart, rpn in enumerate(self.fullseries):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == self.series[start]:
                        break
                for i, rpn in enumerate(self.fullseries[fullstart:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = fullstart + i
                assert index < len(self.fullseries)
                fullpatch = self.fullseries[index]
                del self.fullseries[index]
                self.fullseries.insert(fullstart, fullpatch)
                self.parseseries()
                self.seriesdirty = True

            self.applieddirty = True
            if start > 0:
                self.checktoppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            # files to back up before they are overwritten by the push
            tobackup = set()
            if (not nobackup and force) or keepchanges:
                status = self.checklocalchanges(repo, force=True)
                if keepchanges:
                    tobackup.update(status.modified + status.added +
                                    status.removed + status.deleted)
                else:
                    tobackup.update(status.modified + status.added)

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files,
                                     tobackup=tobackup, keepchanges=keepchanges)
            except AbortNoCleanup:
                raise
            except: # re-raises
                # any other failure: revert the working directory before
                # propagating, so the repo is not left half-patched
                self.ui.warn(_('cleaning up working directory...\n'))
                cmdutil.revert(self.ui, repo, repo['.'],
                               repo.dirstate.parents(), no_backup=True)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and qrefresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]
1536
1536
    def pop(self, repo, patch=None, force=False, update=True, all=False,
            nobackup=False, keepchanges=False):
        """Unapply (pop) applied patches down to, and including, *patch*.

        patch: pop until this patch is the new top (default: pop one).
        force: discard local changes (backed up unless nobackup); update:
        rewrite the working directory to match the new top; all: pop every
        applied patch; keepchanges: refuse to pop if doing so would clobber
        local changes.

        Returns a falsy value on success; raises error.Abort on conflicts
        (public revisions, foreign heads, deletions between revs, ...).
        """
        self.checkkeepchanges(keepchanges, force)
        with repo.wlock():
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                    if not info:
                        raise error.Abort(_("patch %s is not applied") % patch)

            if not self.applied:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            # first applied-list index that will be removed
            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                # even with --no-update we must update if a dirstate
                # parent is among the patches being popped
                parents = repo.dirstate.parents()
                rr = [x.node for x in self.applied]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                # skip the working-copy update when no popped patch is an
                # ancestor of the current working directory
                parents = [p.node() for p in repo[None].parents()]
                update = any(entry.node in parents
                             for entry in self.applied[start:])

            tobackup = set()
            if update:
                s = self.checklocalchanges(repo, force=force or keepchanges)
                if force:
                    if not nobackup:
                        tobackup.update(s.modified + s.added)
                elif keepchanges:
                    tobackup.update(s.modified + s.added +
                                    s.removed + s.deleted)

            self.applieddirty = True
            end = len(self.applied)
            rev = self.applied[start].node

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise error.Abort(_('trying to pop unknown node %s') % node)

            # refuse to strip if other history was grafted on top of the
            # queue, or if any popped revision has been made public
            if heads != [self.applied[-1].node]:
                raise error.Abort(_("popping would remove a revision not "
                                    "managed by this patch queue"))
            if not repo[self.applied[-1].node].mutable():
                raise error.Abort(
                    _("popping would remove a public revision"),
                    hint=_("see 'hg help phases' for details"))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                ctx = repo[qp]
                m, a, r, d = repo.status(qp, '.')[:4]
                if d:
                    raise error.Abort(_("deletions found between repo revs"))

                tobackup = set(a + m + r) & tobackup
                if keepchanges and tobackup:
                    raise error.Abort(_("local changes found, qrefresh first"))
                self.backup(repo, tobackup)
                with repo.dirstate.parentchange():
                    # files added by the patches disappear; modified and
                    # removed files are restored from the target context
                    for f in a:
                        repo.wvfs.unlinkpath(f, ignoremissing=True)
                        repo.dirstate.drop(f)
                    for f in m + r:
                        fctx = ctx[f]
                        repo.wwrite(f, fctx.data(), fctx.flags())
                        repo.dirstate.normal(f)
                    repo.setparents(qp, nullid)
            for patch in reversed(self.applied[start:end]):
                self.ui.status(_("popping %s\n") % patch.name)
            del self.applied[start:end]
            strip(self.ui, repo, [rev], update=False, backup=False)
            for s, state in repo['.'].substate.items():
                repo['.'].sub(s).get(state)
            if self.applied:
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))
1639
1639
1640 def diff(self, repo, pats, opts):
1640 def diff(self, repo, pats, opts):
1641 top, patch = self.checktoppatch(repo)
1641 top, patch = self.checktoppatch(repo)
1642 if not top:
1642 if not top:
1643 self.ui.write(_("no patches applied\n"))
1643 self.ui.write(_("no patches applied\n"))
1644 return
1644 return
1645 qp = self.qparents(repo, top)
1645 qp = self.qparents(repo, top)
1646 if opts.get('reverse'):
1646 if opts.get('reverse'):
1647 node1, node2 = None, qp
1647 node1, node2 = None, qp
1648 else:
1648 else:
1649 node1, node2 = qp, None
1649 node1, node2 = qp, None
1650 diffopts = self.diffopts(opts, patch)
1650 diffopts = self.diffopts(opts, patch)
1651 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1651 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1652
1652
    def refresh(self, repo, pats=None, **opts):
        """Rewrite the topmost applied patch with the current local changes.

        Strips the qtip commit, folds the working-directory changes into a
        new commit with the (possibly edited) patch metadata, and rewrites
        the on-disk patch file. Supported opts include msg/edit/editform,
        user, date, git, short, and include/exclude patterns.

        Returns 1 when no patches are applied; raises error.Abort when the
        qtip has children or is public. On unexpected failure the dirstate
        is rebuilt and a recovery hint is printed before re-raising.
        """
        opts = pycompat.byteskwargs(opts)
        if not self.applied:
            self.ui.write(_("no patches applied\n"))
            return 1
        msg = opts.get('msg', '').rstrip()
        edit = opts.get('edit')
        editform = opts.get('editform', 'mq.qrefresh')
        newuser = opts.get('user')
        newdate = opts.get('date')
        if newdate:
            newdate = '%d %d' % dateutil.parsedate(newdate)
        wlock = repo.wlock()

        try:
            self.checktoppatch(repo)
            (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
            if repo.changelog.heads(top) != [top]:
                raise error.Abort(_("cannot qrefresh a revision with children"))
            if not repo[top].mutable():
                raise error.Abort(_("cannot qrefresh public revision"),
                                  hint=_("see 'hg help phases' for details"))

            cparents = repo.changelog.parents(top)
            patchparent = self.qparents(repo, top)

            inclsubs = checksubstate(repo, patchparent)
            if inclsubs:
                substatestate = repo.dirstate['.hgsubstate']

            ph = patchheader(self.join(patchfn), self.plainmode)
            diffopts = self.diffopts({'git': opts.get('git')}, patchfn,
                                     plain=True)
            if newuser:
                ph.setuser(newuser)
            if newdate:
                ph.setdate(newdate)
            ph.setparent(hex(patchparent))

            # only commit new patch when write is complete
            patchf = self.opener(patchfn, 'w', atomictemp=True)

            # update the dirstate in place, strip off the qtip commit
            # and then commit.
            #
            # this should really read:
            #   mm, dd, aa = repo.status(top, patchparent)[:3]
            # but we do it backwards to take advantage of manifest/changelog
            # caching against the next repo.status call
            mm, aa, dd = repo.status(patchparent, top)[:3]
            changes = repo.changelog.read(top)
            man = repo.manifestlog[changes[0]].read()
            aaa = aa[:]
            match1 = scmutil.match(repo[None], pats, opts)
            # in short mode, we only diff the files included in the
            # patch already plus specified files
            if opts.get('short'):
                # if amending a patch, we start with existing
                # files plus specified files - unfiltered
                match = scmutil.matchfiles(repo, mm + aa + dd + match1.files())
                # filter with include/exclude options
                match1 = scmutil.match(repo[None], opts=opts)
            else:
                match = scmutil.matchall(repo)
            m, a, r, d = repo.status(match=match)[:4]
            mm = set(mm)
            aa = set(aa)
            dd = set(dd)

            # we might end up with files that were added between
            # qtip and the dirstate parent, but then changed in the
            # local dirstate. in this case, we want them to only
            # show up in the added section
            for x in m:
                if x not in aa:
                    mm.add(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    dd.remove(x)
                    mm.add(x)
                else:
                    aa.add(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    aa.remove(x)
                    forget.append(x)
                    continue
                else:
                    mm.discard(x)
                dd.add(x)

            m = list(mm)
            r = list(dd)
            a = list(aa)

            # create 'match' that includes the files to be recommitted.
            # apply match1 via repo.status to ensure correct case handling.
            cm, ca, cr, cd = repo.status(patchparent, match=match1)[:4]
            allmatches = set(cm + ca + cr + cd)
            refreshchanges = [x.intersection(allmatches) for x in (mm, aa, dd)]

            files = set(inclsubs)
            for x in refreshchanges:
                files.update(x)
            match = scmutil.matchfiles(repo, files)

            bmlist = repo[top].bookmarks()

            dsguard = None
            try:
                # guard the dirstate so a failure below rolls it back
                dsguard = dirstateguard.dirstateguard(repo, 'mq.refresh')
                if diffopts.git or diffopts.upgrade:
                    # git-style patches track copies; reconstruct copy
                    # information from both the dirstate and the old patch
                    copies = {}
                    for dst in a:
                        src = repo.dirstate.copied(dst)
                        # during qfold, the source file for copies may
                        # be removed. Treat this as a simple add.
                        if src is not None and src in repo.dirstate:
                            copies.setdefault(src, []).append(dst)
                        repo.dirstate.add(dst)
                    # remember the copies between patchparent and qtip
                    for dst in aaa:
                        f = repo.file(dst)
                        src = f.renamed(man[dst])
                        if src:
                            copies.setdefault(src[0], []).extend(
                                copies.get(dst, []))
                            if dst in a:
                                copies[src[0]].append(dst)
                        # we can't copy a file created by the patch itself
                        if dst in copies:
                            del copies[dst]
                    for src, dsts in copies.iteritems():
                        for dst in dsts:
                            repo.dirstate.copy(src, dst)
                else:
                    for dst in a:
                        repo.dirstate.add(dst)
                    # Drop useless copy information
                    for f in list(repo.dirstate.copies()):
                        repo.dirstate.copy(None, f)
                for f in r:
                    repo.dirstate.remove(f)
                # if the patch excludes a modified file, mark that
                # file with mtime=0 so status can see it.
                mm = []
                for i in pycompat.xrange(len(m) - 1, -1, -1):
                    if not match1(m[i]):
                        mm.append(m[i])
                        del m[i]
                for f in m:
                    repo.dirstate.normal(f)
                for f in mm:
                    repo.dirstate.normallookup(f)
                for f in forget:
                    repo.dirstate.drop(f)

                user = ph.user or changes[1]

                oldphase = repo[top].phase()

                # assumes strip can roll itself back if interrupted
                repo.setparents(*cparents)
                self.applied.pop()
                self.applieddirty = True
                strip(self.ui, repo, [top], update=False, backup=False)
                dsguard.close()
            finally:
                release(dsguard)

            try:
                # might be nice to attempt to roll back strip after this

                defaultmsg = "[mq]: %s" % patchfn
                editor = cmdutil.getcommiteditor(editform=editform)
                if edit:
                    def finishdesc(desc):
                        if desc.rstrip():
                            ph.setmessage(desc)
                            return desc
                        return defaultmsg
                    # i18n: this message is shown in editor with "HG: " prefix
                    extramsg = _('Leave message empty to use default message.')
                    editor = cmdutil.getcommiteditor(finishdesc=finishdesc,
                                                     extramsg=extramsg,
                                                     editform=editform)
                    message = msg or "\n".join(ph.message)
                elif not msg:
                    if not ph.message:
                        message = defaultmsg
                    else:
                        message = "\n".join(ph.message)
                else:
                    message = msg
                    ph.setmessage(msg)

                # Ensure we create a new changeset in the same phase than
                # the old one.
                lock = tr = None
                try:
                    lock = repo.lock()
                    tr = repo.transaction('mq')
                    n = newcommit(repo, oldphase, message, user, ph.date,
                                  match=match, force=True, editor=editor)
                    # only write patch after a successful commit
                    c = [list(x) for x in refreshchanges]
                    if inclsubs:
                        self.putsubstate2changes(substatestate, c)
                    chunks = patchmod.diff(repo, patchparent,
                                           changes=c, opts=diffopts)
                    comments = bytes(ph)
                    if comments:
                        patchf.write(comments)
                    for chunk in chunks:
                        patchf.write(chunk)
                    patchf.close()

                    marks = repo._bookmarks
                    marks.applychanges(repo, tr, [(bm, n) for bm in bmlist])
                    tr.close()

                    self.applied.append(statusentry(n, patchfn))
                finally:
                    lockmod.release(tr, lock)
            except: # re-raises
                # commit failed after the strip: rebuild the dirstate from
                # the old parent and tell the user how to recover
                ctx = repo[cparents[0]]
                repo.dirstate.rebuild(ctx.node(), ctx.manifest())
                self.savedirty()
                self.ui.warn(_('qrefresh interrupted while patch was popped! '
                               '(revert --all, qpush to recover)\n'))
                raise
        finally:
            wlock.release()
            self.removeundo(repo)
1893
1893
1894 def init(self, repo, create=False):
1894 def init(self, repo, create=False):
1895 if not create and os.path.isdir(self.path):
1895 if not create and os.path.isdir(self.path):
1896 raise error.Abort(_("patch queue directory already exists"))
1896 raise error.Abort(_("patch queue directory already exists"))
1897 try:
1897 try:
1898 os.mkdir(self.path)
1898 os.mkdir(self.path)
1899 except OSError as inst:
1899 except OSError as inst:
1900 if inst.errno != errno.EEXIST or not create:
1900 if inst.errno != errno.EEXIST or not create:
1901 raise
1901 raise
1902 if create:
1902 if create:
1903 return self.qrepo(create=True)
1903 return self.qrepo(create=True)
1904
1904
def unapplied(self, repo, patch=None):
    """Return [(index, name)] for pushable patches not yet applied.

    When patch is given, scanning starts just after it; otherwise it
    starts at the current series position.  Non-pushable (guarded)
    patches are explained to the user and omitted from the result.
    """
    if patch and patch not in self.series:
        raise error.Abort(_("patch %s is not in series file") % patch)
    if patch:
        start = self.series.index(patch) + 1
    else:
        start = self.seriesend()
    result = []
    for idx in pycompat.xrange(start, len(self.series)):
        ok, reason = self.pushable(idx)
        if ok:
            result.append((idx, self.series[idx]))
        self.explainpushable(idx)
    return result
1919
1919
def qseries(self, repo, missing=None, start=0, length=None, status=None,
            summary=False):
    """Print the patch series (or, with missing, stray patch files).

    Without missing: prints series entries [start, start+length), each
    labelled applied/unapplied/guarded; status ('A'/'U'/'G') filters to
    one state in non-verbose mode; summary appends the patch's first
    message line.  With missing: walks the patch directory and prints
    files that are not in the series and are not mq bookkeeping files.
    """
    def displayname(pfx, patchname, state):
        # print one entry: optional prefix, coloured name, optional summary
        if pfx:
            self.ui.write(pfx)
        if summary:
            ph = patchheader(self.join(patchname), self.plainmode)
            msg = ph.message[0] if ph.message else ''
            if self.ui.formatted():
                # truncate the summary to the remaining terminal width
                width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                msg = stringutil.ellipsis(msg, width) if width > 0 else ''
            self.ui.write(patchname, label='qseries.' + state)
            self.ui.write(': ')
            self.ui.write(msg, label='qseries.message.' + state)
        else:
            self.ui.write(patchname, label='qseries.' + state)
        self.ui.write('\n')

    applied = {p.name for p in self.applied}
    if length is None:
        length = len(self.series) - start
    if not missing:
        if self.ui.verbose:
            idxwidth = len("%d" % (start + length - 1))
        for i in pycompat.xrange(start, start + length):
            patch = self.series[i]
            if patch in applied:
                char, state = 'A', 'applied'
            elif self.pushable(i)[0]:
                char, state = 'U', 'unapplied'
            else:
                char, state = 'G', 'guarded'
            pfx = ''
            if self.ui.verbose:
                pfx = '%*d %s ' % (idxwidth, i, char)
            elif status and status != char:
                continue
            displayname(pfx, patch, state)
    else:
        msng_list = []
        for root, dirs, files in os.walk(self.path):
            d = root[len(self.path) + 1:]
            for f in files:
                fl = os.path.join(d, f)
                # skip series members, mq state files and hidden files
                if (fl not in self.series and
                    fl not in (self.statuspath, self.seriespath,
                               self.guardspath)
                    and not fl.startswith('.')):
                    msng_list.append(fl)
        for x in sorted(msng_list):
            pfx = 'D ' if self.ui.verbose else ''
            displayname(pfx, x, 'missing')
1979
1979
def issaveline(self, l):
    """Return whether status entry `l` is a qsave checkpoint marker.

    Returns a real bool instead of True/None; callers only test
    truthiness, so this is backward compatible.
    """
    return l.name == '.hg.patches.save.line'
1983
1983
def qrepo(self, create=False):
    """Return the versioned patch-queue repository, or None if absent.

    The base ui is copied and the pager-related state of the live ui is
    mirrored onto the copy so output behaves consistently.
    """
    qui = self.baseui.copy()
    # copy back attributes set by ui.pager()
    if self.ui.pageractive and not qui.pageractive:
        qui.pageractive = self.ui.pageractive
        # internal config: ui.formatted
        qui.setconfig('ui', 'formatted',
                      self.ui.config('ui', 'formatted'), 'mqpager')
        qui.setconfig('ui', 'interactive',
                      self.ui.config('ui', 'interactive'), 'mqpager')
    if create or os.path.isdir(self.join(".hg")):
        return hg.repository(qui, path=self.path, create=create)
1996
1996
def restore(self, repo, rev, delete=None, qupdate=None):
    """Reload queue state from a qsave changeset description.

    Parses the 'Patch Data:' section of rev's description back into the
    applied/fullseries lists.  With delete, strips the save changeset if
    it has no children; with qupdate, updates the queue repository to
    the parents recorded on the 'Dirstate:' line.
    """
    desc = repo[rev].description().strip()
    lines = desc.splitlines()
    datastart = None
    series = []
    applied = []
    qpp = None
    for i, line in enumerate(lines):
        if line == 'Patch Data:':
            datastart = i + 1
        elif line.startswith('Dirstate:'):
            # "Dirstate: <p1hex> <p2hex>" -> saved queue repo parents
            l = line.rstrip()
            l = l[10:].split(' ')
            qpp = [bin(x) for x in l]
        elif datastart is not None:
            # applied entries are "<nodehex>:<name>"; bare ":<name>"
            # lines are unapplied series entries
            l = line.rstrip()
            n, name = l.split(':', 1)
            if n:
                applied.append(statusentry(bin(n), name))
            else:
                series.append(l)
    if datastart is None:
        self.ui.warn(_("no saved patch data found\n"))
        return 1
    self.ui.warn(_("restoring status: %s\n") % lines[0])
    self.fullseries = series
    self.applied = applied
    self.parseseries()
    self.seriesdirty = True
    self.applieddirty = True
    heads = repo.changelog.heads()
    if delete:
        if rev not in heads:
            self.ui.warn(_("save entry has children, leaving it alone\n"))
        else:
            self.ui.warn(_("removing save entry %s\n") % short(rev))
            pp = repo.dirstate.parents()
            # update the working copy only if it sat on the save entry
            strip(self.ui, repo, [rev], update=rev in pp, backup=False)
    if qpp:
        self.ui.warn(_("saved queue repository parents: %s %s\n") %
                     (short(qpp[0]), short(qpp[1])))
        if qupdate:
            self.ui.status(_("updating queue directory\n"))
            qrepository = self.qrepo()
            if not qrepository:
                self.ui.warn(_("unable to load queue repository\n"))
                return 1
            hg.clean(qrepository, qpp[0])
2050
2050
def save(self, repo, msg=None):
    """Commit a qsave checkpoint changeset recording the queue state.

    The commit message embeds the queue repo's dirstate parents plus a
    'Patch Data:' section listing applied entries and the full series,
    in the format restore() parses.  Returns 1 on failure.
    """
    if not self.applied:
        self.ui.warn(_("save: no patches applied, exiting\n"))
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn(_("status is already saved\n"))
        return 1

    if msg:
        msg = "hg patches: " + msg.rstrip('\r\n')
    else:
        msg = _("hg patches saved state")
    qrepository = self.qrepo()
    if qrepository:
        pp = qrepository.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
    msg += "\n\nPatch Data:\n"
    msg += ''.join('%s\n' % x for x in self.applied)
    msg += ''.join(':%s\n' % x for x in self.fullseries)
    n = repo.commit(msg, force=True)
    if not n:
        self.ui.warn(_("repo commit failed\n"))
        return 1
    # mark the checkpoint itself as an applied entry
    self.applied.append(statusentry(n, '.hg.patches.save.line'))
    self.applieddirty = True
    self.removeundo(repo)
2077
2077
def fullseriesend(self):
    """Return the fullseries index just past the last applied patch.

    Returns 0 when nothing is applied, and len(fullseries) when the
    last applied patch cannot be located in fullseries.
    """
    if not self.applied:
        return 0
    last = self.applied[-1].name
    pos = self.findseries(last)
    if pos is None:
        return len(self.fullseries)
    return pos + 1
2086
2086
def seriesend(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    def firstpushable(pos):
        # scan forward from pos for a pushable patch, explaining any
        # guarded ones along the way; short-circuit in all_patches mode
        if all_patches or pos >= len(self.series):
            return pos
        for i in pycompat.xrange(pos, len(self.series)):
            ok, reason = self.pushable(i)
            if ok:
                return i
            self.explainpushable(i)
        return len(self.series)

    end = 0
    if self.applied:
        last = self.applied[-1].name
        try:
            end = self.series.index(last) + 1
        except ValueError:
            # last applied patch is no longer in the series
            return 0
    return firstpushable(end)
2110
2110
def appliedname(self, index):
    """Return the display name of applied patch `index`.

    In verbose mode the name is prefixed with its series index.
    """
    pname = self.applied[index].name
    if self.ui.verbose:
        return "%d %s" % (self.series.index(pname), pname)
    return pname
2118
2118
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patch files or existing revisions into the queue.

    With rev, places the given (mutable, linear, non-merge) revisions
    under mq control as applied patches.  Otherwise imports each file
    in `files` ('-' means stdin, which requires patchname) as a new
    series entry.  Returns the list of imported patch names.
    """
    def checkseries(patchname):
        # refuse duplicate series entries
        if patchname in self.series:
            raise error.Abort(_('patch %s is already in the series file')
                              % patchname)

    if rev:
        if files:
            raise error.Abort(_('option "-r" not valid when importing '
                                'files'))
        rev = scmutil.revrange(repo, rev)
        rev.sort(reverse=True)
    elif not files:
        raise error.Abort(_('no files or revisions specified'))
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise error.Abort(_('option "-n" not valid when importing multiple '
                            'patches'))
    imported = []
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev.first()))
        if len(heads) > 1:
            raise error.Abort(_('revision %d is the root of more than one '
                                'branch') % rev.last())
        if self.applied:
            base = repo.changelog.node(rev.first())
            if base in [n.node for n in self.applied]:
                raise error.Abort(_('revision %d is already managed')
                                  % rev.first())
            if heads != [self.applied[-1].node]:
                raise error.Abort(_('revision %d is not the parent of '
                                    'the queue') % rev.first())
            base = repo.changelog.rev(self.applied[0].node)
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev.first())]:
                raise error.Abort(_('revision %d has unmanaged children')
                                  % rev.first())
            lastparent = None

        diffopts = self.diffopts({'git': git})
        with repo.transaction('qimport') as tr:
            for r in rev:
                if not repo[r].mutable():
                    raise error.Abort(_('revision %d is not mutable') % r,
                                      hint=_("see 'hg help phases' "
                                             'for details'))
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    raise error.Abort(_('cannot import merge revision %d')
                                      % r)
                # revisions are walked newest-first; each must be the
                # first parent of the one imported before it
                if lastparent and lastparent != r:
                    raise error.Abort(_('revision %d is not the parent of '
                                        '%d')
                                      % (r, lastparent))
                lastparent = p1

                if not patchname:
                    # derive a name from the first description line
                    patchname = self.makepatchname(
                        repo[r].description().split('\n', 1)[0],
                        '%d.diff' % r)
                checkseries(patchname)
                self.checkpatchname(patchname, force)
                self.fullseries.insert(0, patchname)

                with self.opener(patchname, "w") as fp:
                    cmdutil.exportfile(repo, [n], fp, opts=diffopts)

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                imported.append(patchname)
                patchname = None
            if rev and repo.ui.configbool('mq', 'secret'):
                # if we added anything with --rev, move the secret root
                phases.retractboundary(repo, tr, phases.secret, [n])
        self.parseseries()
        self.applieddirty = True
        self.seriesdirty = True

    for i, filename in enumerate(files):
        if existing:
            if filename == '-':
                raise error.Abort(_('-e is incompatible with import from -')
                                  )
            filename = normname(filename)
            self.checkreservedname(filename)
            if util.url(filename).islocal():
                originpath = self.join(filename)
                if not os.path.isfile(originpath):
                    raise error.Abort(
                        _("patch %s does not exist") % filename)

            if patchname:
                self.checkpatchname(patchname, force)

                self.ui.write(_('renaming %s to %s\n')
                              % (filename, patchname))
                util.rename(originpath, self.join(patchname))
            else:
                patchname = filename

        else:
            if filename == '-' and not patchname:
                raise error.Abort(_('need --name to import a patch from -'))
            elif not patchname:
                patchname = normname(os.path.basename(filename.rstrip('/')))
            self.checkpatchname(patchname, force)
            try:
                if filename == '-':
                    text = self.ui.fin.read()
                else:
                    fp = hg.openpath(self.ui, filename)
                    text = fp.read()
                    fp.close()
            except (OSError, IOError):
                raise error.Abort(_("unable to read file %s") % filename)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
            patchf.close()
        if not force:
            checkseries(patchname)
        if patchname not in self.series:
            # insert after the last applied patch, offset by how many
            # files we have imported so far
            index = self.fullseriesend() + i
            self.fullseries[index:index] = [patchname]
        self.parseseries()
        self.seriesdirty = True
        self.ui.warn(_("adding %s to series file\n") % patchname)
        self.added.append(patchname)
        imported.append(patchname)
        patchname = None

    self.removeundo(repo)
    return imported
2258
2258
def fixkeepchangesopts(ui, opts):
    """Inject keep_changes=True into opts when mq.keepchanges is set.

    Left untouched when the config is off or when --force/--exact was
    given; otherwise a copy of opts is returned with the flag added.
    """
    keepchanges = ui.configbool('mq', 'keepchanges')
    if not keepchanges or opts.get('force') or opts.get('exact'):
        return opts
    newopts = dict(opts)
    newopts['keep_changes'] = True
    return newopts
2266
2266
@command("qdelete|qremove|qrm",
         [('k', 'keep', None, _('keep patch file')),
          ('r', 'rev', [],
           _('stop managing a revision (DEPRECATED)'), _('REV'))],
         _('hg qdelete [-k] [PATCH]...'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. Exact
    patch identifiers must be given. With -k/--keep, the patch files are
    preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    mq = repo.mq
    mq.delete(repo, patches, pycompat.byteskwargs(opts))
    mq.savedirty()
    return 0
2286
2286
@command("qapplied",
         [('1', 'last', None, _('show only the preceding applied patch'))
          ] + seriesopts,
         _('hg qapplied [-1] [-s] [PATCH]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    mq = repo.mq
    opts = pycompat.byteskwargs(opts)

    if patch:
        if patch not in mq.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        end = mq.seriesend(True)

    onlylast = opts.get('last')
    if onlylast and not end:
        ui.write(_("no patches applied\n"))
        return 1
    if onlylast and end == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if onlylast:
        # show just the patch before the topmost applied one
        start = end - 2
        end = 1
    else:
        start = 0

    mq.qseries(repo, length=end, start=start, status='A',
               summary=opts.get('summary'))
2321
2321
2322
2322
@command("qunapplied",
         [('1', 'first', None, _('show only the first patch'))] + seriesopts,
         _('hg qunapplied [-1] [-s] [PATCH]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    mq = repo.mq
    opts = pycompat.byteskwargs(opts)
    if patch:
        if patch not in mq.series:
            raise error.Abort(_("patch %s is not in series file") % patch)
        start = mq.series.index(patch) + 1
    else:
        start = mq.seriesend(True)

    if start == len(mq.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    length = 1 if opts.get('first') else None
    mq.qseries(repo, start=start, length=length, status='U',
               summary=opts.get('summary'))
2351
2351
@command("qimport",
         [('e', 'existing', None, _('import file in patch directory')),
          ('n', 'name', '',
           _('name of patch file'), _('NAME')),
          ('f', 'force', None, _('overwrite existing files')),
          ('r', 'rev', [],
           _('place existing revisions under mq control'), _('REV')),
          ('g', 'git', None, _('use git extended diff format')),
          ('P', 'push', None, _('qpush after importing'))],
         _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... [FILE]...'),
         helpcategory=command.CATEGORY_IMPORT_EXPORT)
def qimport(ui, repo, *filename, **opts):
    """import a patch or existing changeset

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev . -n patch will place the current revision
    under mq control). With -g/--git, patches imported with --rev will
    use the git diff format. See the diffs help topic for information
    on why this is important for preserving rename/copy information
    and permission changes. Use :hg:`qfinish` to remove changesets
    from mq control.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    opts = pycompat.byteskwargs(opts)
    with repo.lock(): # cause this may move phase
        mq = repo.mq
        try:
            imported = mq.qimport(
                repo, filename, patchname=opts.get('name'),
                existing=opts.get('existing'), force=opts.get('force'),
                rev=opts.get('rev'), git=opts.get('git'))
        finally:
            # persist queue state even if the import aborted part-way
            mq.savedirty()

    if imported and opts.get('push') and not opts.get('rev'):
        return mq.push(repo, imported[-1])
    return 0
2411
2411
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    q = repo.mq
    # q.init returns the (possibly newly created) versioned patch repo,
    # or a falsy value when no nested repository was requested/created.
    r = q.init(repo, create)
    q.savedirty()
    if r:
        # Seed the new patch repository with the standard mq support
        # files, then schedule them for commit.
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wvfs('.hgignore', 'w')
            # Exclude nested control dirs and the transient state files.
            fp.write('^\\.hg\n')
            fp.write('^\\.mq\n')
            fp.write('syntax: glob\n')
            fp.write('status\n')
            fp.write('guards\n')
            fp.close()
        if not os.path.exists(r.wjoin('series')):
            # An empty series file so patch ordering is tracked from the start.
            r.wvfs('series', 'w').close()
        r[None].add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0
2437
2437
@command("qinit",
         [('c', 'create-repo', None, _('create queue repository'))],
         _('hg qinit [-c]'),
         helpcategory=command.CATEGORY_REPO_CREATION,
         helpbasic=True)
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # opts has not gone through byteskwargs here, so the key is a
    # native (r'') string.
    return qinit(ui, repo, create=opts.get(r'create_repo'))
2455
2455
@command("qclone",
         [('', 'pull', None, _('use pull protocol to copy metadata')),
          ('U', 'noupdate', None,
           _('do not update the new working directories')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('p', 'patches', '',
           _('location of source patch repository'), _('REPO')),
         ] + cmdutil.remoteopts,
         _('hg qclone [OPTION]... SOURCE [DEST]'),
         helpcategory=command.CATEGORY_REPO_CREATION,
         norepo=True)
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    opts = pycompat.byteskwargs(opts)
    def patchdir(repo):
        """compute a patch repo url from a repo object"""
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'

    # main repo (destination and sources)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.peer(ui, opts, ui.expandpath(source))

    # patches repo (source only); verify it exists before cloning anything
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    try:
        hg.peer(ui, opts, patchespath)
    except error.RepoError:
        raise error.Abort(_('versioned patch repository not found'
                            ' (see init --mq)'))
    qbase, destrev = None, None
    if sr.local():
        repo = sr.local()
        # NOTE(review): qbase is still None here, so repo[qbase] is the
        # working-directory context — presumably this checks the wdir
        # phase against secret; confirm against upstream intent.
        if repo.mq.applied and repo[qbase].phase() != phases.secret:
            qbase = repo.mq.applied[0].node
            if not hg.islocal(dest):
                # Clone only the revisions not introduced by applied
                # patches (heads minus heads reachable from qbase, plus
                # qbase's parent).
                heads = set(repo.heads())
                destrev = list(heads.difference(repo.heads(qbase)))
                destrev.append(repo.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # Remote source: best-effort lookup of the qbase bookmark/tag.
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass

    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, opts, sr.url(), dest,
                      pull=opts.get('pull'),
                      revs=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))

    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))

    if dr.local():
        repo = dr.local()
        if qbase:
            # The destination must not contain the changesets created by
            # applied patches; strip them before updating.
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            strip(ui, repo, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(repo, repo.changelog.tip())
2544
2544
@command("qcommit|qci",
         commands.table["commit|ci"][1],
         _('hg qcommit [OPTION]... [FILE]...'),
         helpcategory=command.CATEGORY_COMMITTING,
         inferrepo=True)
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    # Delegate straight to the regular commit command, run against the
    # versioned patch repository (if one exists).
    qrepo = repo.mq.qrepo()
    if not qrepo:
        raise error.Abort('no queue repository')
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
2559
2559
@command("qseries",
         [('m', 'missing', None, _('print patches not in series')),
         ] + seriesopts,
         _('hg qseries [-ms]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    # opts keys are native strings (no byteskwargs conversion here).
    missing = opts.get(r'missing')
    summary = opts.get(r'summary')
    repo.mq.qseries(repo, missing=missing, summary=summary)
    return 0
2572
2572
@command("qtop", seriesopts, _('hg qtop [-s]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    q = repo.mq
    # Index just past the last applied patch; 0 means nothing applied.
    end = q.seriesend(True) if q.applied else 0
    if not end:
        ui.write(_("no patches applied\n"))
        return 1
    q.qseries(repo, start=end - 1, length=1, status='A',
              summary=opts.get(r'summary'))
2591
2591
@command("qnext", seriesopts, _('hg qnext [-s]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def next(ui, repo, **opts):
    """print the name of the next pushable patch

    Returns 0 on success."""
    q = repo.mq
    pos = q.seriesend()
    # seriesend() == len(series) means every patch is already applied.
    if pos == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    q.qseries(repo, start=pos, length=1, summary=opts.get(r'summary'))
2604
2604
@command("qprev", seriesopts, _('hg qprev [-s]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def prev(ui, repo, **opts):
    """print the name of the preceding applied patch

    Returns 0 on success."""
    q = repo.mq
    napplied = len(q.applied)
    # Need at least two applied patches for a "previous" one to exist.
    if not napplied:
        ui.write(_("no patches applied\n"))
        return 1
    if napplied == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    pos = q.series.index(q.applied[-2].name)
    q.qseries(repo, start=pos, length=1, status='A',
              summary=opts.get(r'summary'))
2622
2622
def setupheaderopts(ui, opts):
    # Fill in the user/date header fields from the environment when
    # --currentuser / --currentdate were given and no explicit value
    # was supplied on the command line.
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % dateutil.makedate()
2628
2628
@command("qnew",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
          ('g', 'git', None, _('use git extended diff format')),
          ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
          ('u', 'user', '',
           _('add "From: <USER>" to patch'), _('USER')),
          ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
          ('d', 'date', '',
           _('add "Date: <DATE>" to patch'), _('DATE'))
         ] + cmdutil.walkopts + cmdutil.commitopts,
         _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...'),
         helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
         inferrepo=True)
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    opts = pycompat.byteskwargs(opts)
    msg = cmdutil.logmessage(ui, opts)
    q = repo.mq
    # Pass the resolved commit message and user/date headers down to
    # queue.new via the opts dict.
    opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **pycompat.strkwargs(opts))
    q.savedirty()
    return 0
2676
2676
@command("qrefresh",
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('g', 'git', None, _('use git extended diff format')),
          ('s', 'short', None,
           _('refresh only files already in the patch and specified files')),
          ('U', 'currentuser', None,
           _('add/update author field in patch with current user')),
          ('u', 'user', '',
           _('add/update author field in patch with given user'), _('USER')),
          ('D', 'currentdate', None,
           _('add/update date field in patch with current date')),
          ('d', 'date', '',
           _('add/update date field in patch with given date'), _('DATE'))
         ] + cmdutil.walkopts + cmdutil.commitopts,
         _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
         helpcategory=command.CATEGORY_COMMITTING, helpbasic=True,
         inferrepo=True)
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    q = repo.mq
    message = cmdutil.logmessage(ui, opts)
    setupheaderopts(ui, opts)
    # Refresh rewrites the topmost patch; hold the working-copy lock
    # for the whole operation and persist queue state before returning.
    with repo.wlock():
        ret = q.refresh(repo, pats, msg=message, **pycompat.strkwargs(opts))
        q.savedirty()
        return ret
2723
2723
@command("qdiff",
         cmdutil.diffopts + cmdutil.diffopts2 + cmdutil.walkopts,
         _('hg qdiff [OPTION]... [FILE]...'),
         helpcategory=command.CATEGORY_FILE_CONTENTS, helpbasic=True,
         inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    ui.pager('qdiff')
    byteopts = pycompat.byteskwargs(opts)
    repo.mq.diff(repo, pats, byteopts)
    return 0
2747
2747
@command('qfold',
         [('e', 'edit', None, _('invoke editor on commit messages')),
          ('k', 'keep', None, _('keep folded patch files')),
         ] + cmdutil.commitopts,
         _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...'),
         helpcategory=command.CATEGORY_CHANGE_MANAGEMENT)
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""
    opts = pycompat.byteskwargs(opts)
    q = repo.mq
    if not files:
        raise error.Abort(_('qfold requires at least one patch name'))
    if not q.checktoppatch(repo)[0]:
        raise error.Abort(_('no patches applied'))
    q.checklocalchanges(repo)

    message = cmdutil.logmessage(ui, opts)

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('skipping already folded patch %s\n') % p)
            # Fix: actually skip the duplicate (or the current patch
            # itself). Previously the patch was still appended despite
            # the warning, so it would be folded a second time.
            continue
        if q.isapplied(p):
            raise error.Abort(_('qfold cannot fold already applied patch %s')
                              % p)
        patches.append(p)

    # Apply each patch on top of the current one, collecting headers.
    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise error.Abort(_('error folding patch %s') % p)

    if not message:
        # No explicit -m/-l message: concatenate the folded patches'
        # headers onto the current patch header, separated by '* * *'.
        ph = patchheader(q.join(parent), q.plainmode)
        message = ph.message
        for msg in messages:
            if msg:
                if message:
                    message.append('* * *')
                message.extend(msg)
        message = '\n'.join(message)

    diffopts = q.patchopts(q.diffopts(), *patches)
    with repo.wlock():
        q.refresh(repo, msg=message, git=diffopts.git, edit=opts.get('edit'),
                  editform='mq.qfold')
        q.delete(repo, patches, opts)
        q.savedirty()
2816
2816
@command("qgoto",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('overwrite any local changes')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qgoto [OPTION]... PATCH'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    opts = fixkeepchangesopts(ui, pycompat.byteskwargs(opts))
    q = repo.mq
    patch = q.lookup(patch)
    # Pop if the target patch is already applied, otherwise push up to it.
    move = q.pop if q.isapplied(patch) else q.push
    ret = move(repo, patch, force=opts.get('force'),
               nobackup=opts.get('no_backup'),
               keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
2842
2842
@command("qguard",
         [('l', 'list', None, _('list all patches and guards')),
          ('n', 'none', None, _('drop all guards'))],
         _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::

       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # Print one series entry with its guards, colorized by state.
        guards = q.seriesguards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        lastidx = len(guards) - 1
        for i, guard in enumerate(guards):
            # Choose the label from the guard's sign, then emit once.
            if guard.startswith('+'):
                guardlabel = 'qguard.positive'
            elif guard.startswith('-'):
                guardlabel = 'qguard.negative'
            else:
                guardlabel = 'qguard.unguarded'
            ui.write(guard, label=guardlabel)
            if i != lastidx:
                ui.write(' ')
        ui.write('\n')

    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    # NOTE: opts keys use r'' (native str) because **opts was not run
    # through byteskwargs in this command.
    if opts.get(r'list'):
        if args or opts.get(r'none'):
            raise error.Abort(_('cannot mix -l/--list with options or '
                                'arguments'))
        for i in pycompat.xrange(len(q.series)):
            status(i)
        return
    # No patch name (or first arg is a guard): default to the top patch.
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise error.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise error.Abort(_('no patch to work with'))
    if args or opts.get(r'none'):
        # Setting guards (possibly to the empty list with --none).
        idx = q.findseries(patch)
        if idx is None:
            raise error.Abort(_('no patch named %s') % patch)
        q.setguards(idx, args)
        q.savedirty()
    else:
        # Just print the guards of the named patch.
        status(q.series.index(q.lookup(patch)))
2918
2918
@command("qheader", [], _('hg qheader [PATCH]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch

    Returns 0 on success."""
    q = repo.mq

    # Resolve which patch to show: an explicit argument, or qtip.
    if patch:
        patch = q.lookup(patch)
    elif not q.applied:
        ui.write(_('no patches applied\n'))
        return 1
    else:
        patch = q.lookup('qtip')
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')
2937
2937
def lastsavename(path):
    """Find the most recent numbered save of *path*.

    Scans the directory containing *path* for entries named
    '<base>.<number>' and returns a tuple (fullpath, index) for the
    entry with the highest numeric suffix, or (None, None) when no
    save exists.
    """
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    # Use a raw string so the backslash escape reaches the regex engine,
    # and escape the base name so '.' (and any other metacharacter in
    # the name) matches literally instead of matching any character.
    namere = re.compile(r'%s\.([0-9]+)' % re.escape(base))
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)
2954
2954
def savename(path):
    """Return the next unused save name for *path* (path + '.<n>')."""
    last, index = lastsavename(path)
    # No previous save: start numbering from 1.
    if last is None:
        index = 0
    return path + ".%d" % (index + 1)
2961
2961
@command("qpush",
         [('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('apply on top of local changes')),
          ('e', 'exact', None,
           _('apply the target patch to its recorded parent')),
          ('l', 'list', None, _('list patch name in commit text')),
          ('a', 'all', None, _('apply all patches')),
          ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
          ('n', 'name', '',
           _('merge queue name (DEPRECATED)'), _('NAME')),
          ('', 'move', None,
           _('reorder patch series and apply only the patch')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
         helpbasic=True)
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and patch over
    uncommitted changes.

    Return 0 on success.
    """
    q = repo.mq
    mergeq = None

    opts = pycompat.byteskwargs(opts)
    opts = fixkeepchangesopts(ui, opts)
    if opts.get('merge'):
        # Deprecated merge mode: locate the queue to merge from, either
        # an explicit -n NAME or the most recent save.
        if opts.get('name'):
            newpath = repo.vfs.join(opts.get('name'))
        else:
            newpath = lastsavename(q.path)[0]
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.baseui, repo.path, newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    return q.push(repo, patch, force=opts.get('force'),
                  list=opts.get('list'), mergeq=mergeq,
                  all=opts.get('all'), move=opts.get('move'),
                  exact=opts.get('exact'), nobackup=opts.get('no_backup'),
                  keepchanges=opts.get('keep_changes'))
3009
3009
@command("qpop",
         [('a', 'all', None, _('pop all patches')),
          ('n', 'name', '',
           _('queue name to pop (DEPRECATED)'), _('NAME')),
          ('', 'keep-changes', None,
           _('tolerate non-conflicting local changes')),
          ('f', 'force', None, _('forget any local changes to patched files')),
          ('', 'no-backup', None, _('do not save backup copies of files'))],
         _('hg qpop [-a] [-f] [PATCH | INDEX]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION,
         helpbasic=True)
def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    Without argument, pops off the top of the patch stack. If given a
    patch name, keeps popping off patches until the named patch is at
    the top of the stack.

    By default, abort if the working directory contains uncommitted
    changes. With --keep-changes, abort only if the uncommitted files
    overlap with patched files. With -f/--force, backup and discard
    changes made to such files.

    Return 0 on success.
    """
    opts = pycompat.byteskwargs(opts)
    opts = fixkeepchangesopts(ui, opts)
    # -n/--name pops from a named (saved) queue; in that case the
    # working directory is left alone.
    name = opts.get('name')
    if name:
        q = queue(ui, repo.baseui, repo.path, repo.vfs.join(name))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
        localupdate = True
    ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
                all=opts.get('all'), nobackup=opts.get('no_backup'),
                keepchanges=opts.get('keep_changes'))
    q.savedirty()
    return ret
3049
3049
@command("qrename|qmv", [], _('hg qrename PATCH1 [PATCH2]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2.

    Returns 0 on success."""
    q = repo.mq
    # Single-argument form: rename the current (top) patch.
    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    # Renaming into an existing directory keeps the old basename.
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    q.checkpatchname(name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    # Rewrite the series entry, preserving any '#guard' annotations.
    idx = q.findseries(patch)
    guards = q.guard_re.findall(q.fullseries[idx])
    q.fullseries[idx] = name + ''.join([' #' + g for g in guards])
    q.parseseries()
    q.seriesdirty = True

    # If the patch is applied, update the status entry as well.
    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applieddirty = True

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    # Mirror the rename in the versioned patch repository, if any.
    r = q.qrepo()
    if r and patch in r.dirstate:
        wctx = r[None]
        with r.wlock():
            if r.dirstate[patch] == 'a':
                # Freshly added file: just swap the dirstate entries.
                r.dirstate.drop(patch)
                r.dirstate.add(name)
            else:
                wctx.copy(patch, name)
                wctx.forget([patch])

    q.savedirty()
3105
3105
@command("qrestore",
         [('d', 'delete', None, _('delete save entry')),
          ('u', 'update', None, _('update queue working directory'))],
         _('hg qrestore [-d] [-u] REV'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    rev = repo.lookup(rev)
    q = repo.mq
    # opts was not converted with byteskwargs, so keys are native str.
    q.restore(repo, rev, delete=opts.get(r'delete'),
              qupdate=opts.get(r'update'))
    q.savedirty()
    return 0
3121
3121
@command("qsave",
         [('c', 'copy', None, _('copy patch directory')),
          ('n', 'name', '',
           _('copy directory name'), _('NAME')),
          ('e', 'empty', None, _('clear queue status file')),
          ('f', 'force', None, _('force copy'))] + cmdutil.commitopts,
         _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use :hg:`rebase` instead."""
    q = repo.mq
    opts = pycompat.byteskwargs(opts)
    message = cmdutil.logmessage(ui, opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.savedirty() # save to .hg/patches before copying
    if opts.get('copy'):
        path = q.path
        if opts.get('name'):
            # Explicit destination: refuse to clobber unless forced.
            newpath = os.path.join(q.basepath, opts.get('name'))
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise error.Abort(_('destination %s exists and is not '
                                        'a directory') % newpath)
                if not opts.get('force'):
                    raise error.Abort(_('destination %s exists, '
                                        'use -f to force') % newpath)
        else:
            # Otherwise pick the next numbered save name.
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts.get('empty'):
        del q.applied[:]
        q.applieddirty = True
        q.savedirty()
    return 0
3161
3161
3162
3162
@command("qselect",
         [('n', 'none', None, _('disable all guards')),
          ('s', 'series', None, _('list all guards in series file')),
          ('', 'pop', None, _('pop to before first guarded applied patch')),
          ('', 'reapply', None, _('pop, then reapply patches'))],
         _('hg qselect [OPTION]... [GUARD]...'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    Use the :hg:`qguard` command to set or print guards on patch, then use
    qselect to tell mq which guards to use. A patch will be pushed if
    it has no guards or any positive guards match the currently
    selected guard, but will not be pushed if any negative guards
    match the current guard. For example::

        qguard foo.patch -- -stable (negative guard)
        qguard bar.patch +stable (positive guard)
        qselect stable

    This activates the "stable" guard. mq will skip foo.patch (because
    it has a negative match) but push bar.patch (because it has a
    positive match).

    With no arguments, prints the currently active guards.
    With one argument, sets the active guard.

    Use -n/--none to deactivate guards (no other arguments needed).
    When no guards are active, patches with positive guards are
    skipped and patches with negative guards are pushed.

    qselect can change the guards on applied patches. It does not pop
    guarded patches by default. Use --pop to pop back to the last
    applied patch that is not guarded. Use --reapply (which implies
    --pop) to push back to the current patch afterwards, but skip
    guarded patches.

    Use -s/--series to print a list of all guards in the series file
    (no other arguments needed). Use -v for more information.

    Returns 0 on success.'''

    q = repo.mq
    opts = pycompat.byteskwargs(opts)
    guards = q.active()

    def pushable(idx):
        # Whether the idx-th applied patch is pushable under the
        # currently active guards.
        return q.pushable(q.applied[idx].name)[0]

    def guardedindices():
        # Indices of applied patches blocked by a guard.
        return [i for i in pycompat.xrange(len(q.applied))
                if not pushable(i)]

    if args or opts.get('none'):
        # Changing the active guard set; remember the before state so
        # we can report how the push/pop landscape changed.
        old_unapplied = q.unapplied(repo)
        old_guarded = guardedindices()
        q.setactive(args)
        q.savedirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if not opts.get('pop') and not opts.get('reapply'):
            unapplied = q.unapplied(repo)
            guarded = guardedindices()
            if len(unapplied) != len(old_unapplied):
                ui.status(_('number of unguarded, unapplied patches has '
                            'changed from %d to %d\n') %
                          (len(old_unapplied), len(unapplied)))
            if len(guarded) != len(old_guarded):
                ui.status(_('number of guarded, applied patches has changed '
                            'from %d to %d\n') %
                          (len(old_guarded), len(guarded)))
    elif opts.get('series'):
        # Tally how many series entries carry each guard.
        guards = {}
        noguards = 0
        for gs in q.seriesguards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = list(guards.items())
        guards.sort(key=lambda x: x[0][1:])
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        # Plain invocation: print the currently active guards.
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))
    reapply = opts.get('reapply') and q.applied and q.applied[-1].name
    popped = False
    if opts.get('pop') or opts.get('reapply'):
        # Pop down to just below the first guarded applied patch.
        for i in pycompat.xrange(len(q.applied)):
            if not pushable(i):
                ui.status(_('popping guarded patches\n'))
                popped = True
                if i == 0:
                    q.pop(repo, all=True)
                else:
                    q.pop(repo, q.applied[i - 1].name)
                break
    if popped:
        try:
            if reapply:
                ui.status(_('reapplying unguarded patches\n'))
                q.push(repo, reapply)
        finally:
            q.savedirty()
3275
3275
@command("qfinish",
         [('a', 'applied', None, _('finish all applied changesets'))],
         _('hg qfinish [-a] [REV]...'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def finish(ui, repo, *revrange, **opts):
    """move applied patches into repository history

    Finishes the specified revisions (corresponding to applied
    patches) by moving them out of mq control into regular repository
    history.

    Accepts a revision range or the -a/--applied option. If --applied
    is specified, all applied mq revisions are removed from mq
    control. Otherwise, the given revisions must be at the base of the
    stack of applied patches.

    This can be especially useful if your changes have been applied to
    an upstream repository, or if you are about to push your changes
    to upstream.

    Returns 0 on success.
    """
    # opts was not converted with byteskwargs, so keys are native str.
    if opts.get(r'applied'):
        revrange = ('qbase::qtip',) + revrange
    elif not revrange:
        raise error.Abort(_('no revisions specified'))

    q = repo.mq
    if not q.applied:
        ui.status(_('no patches applied\n'))
        return 0

    revs = scmutil.revrange(repo, revrange)
    if repo['.'].rev() in revs and repo[None].files():
        ui.warn(_('warning: uncommitted changes in the working directory\n'))
    # queue.finish may changes phases but leave the responsibility to lock the
    # repo to the caller to avoid deadlock with wlock. This command code is
    # responsibility for this locking.
    with repo.lock():
        q.finish(repo, revs)
        q.savedirty()
    return 0
3318
3318
@command("qqueue",
         [('l', 'list', False, _('list all available queues')),
          ('', 'active', False, _('print name of active queue')),
          ('c', 'create', False, _('create new queue')),
          ('', 'rename', False, _('rename active queue')),
          ('', 'delete', False, _('delete reference to queue')),
          ('', 'purge', False, _('delete queue, and remove patch dir')),
          ],
         _('[OPTION] [QUEUE]'),
         helpcategory=command.CATEGORY_CHANGE_ORGANIZATION)
def qqueue(ui, repo, name=None, **opts):
    '''manage multiple patch queues

    Supports switching between different patch queues, as well as creating
    new patch queues and deleting existing ones.

    Omitting a queue name or specifying -l/--list will show you the registered
    queues - by default the "normal" patches queue is registered. The currently
    active queue will be marked with "(active)". Specifying --active will print
    only the name of the active queue.

    To create a new queue, use -c/--create. The queue is automatically made
    active, except in the case where there are applied patches from the
    currently active queue in the repository. Then the queue will only be
    created and switching will fail.

    To delete an existing queue, use --delete. You cannot delete the currently
    active queue.

    Returns 0 on success.
    '''
    q = repo.mq
    # On-disk state: the queue named 'patches' lives in .hg/patches, every
    # other queue in .hg/patches-<name>.  The registry of all queues is
    # .hg/patches.queues; the active queue name is in .hg/patches.queue
    # (empty/absent means the default queue).
    _defaultqueue = 'patches'
    _allqueues = 'patches.queues'
    _activequeue = 'patches.queue'

    def _getcurrent():
        # Derive the active queue name from the mq path rather than from
        # the state file, so it agrees with what `q` is actually using.
        cur = os.path.basename(q.path)
        if cur.startswith('patches-'):
            cur = cur[8:]
        return cur

    def _noqueues():
        # True when no queue registry file exists yet (fresh repo).
        try:
            fh = repo.vfs(_allqueues, 'r')
            fh.close()
        except IOError:
            return True

        return False

    def _getqueues():
        current = _getcurrent()

        try:
            fh = repo.vfs(_allqueues, 'r')
            queues = [queue.strip() for queue in fh if queue.strip()]
            fh.close()
            if current not in queues:
                queues.append(current)
        except IOError:
            queues = [_defaultqueue]

        return sorted(queues)

    def _setactive(name):
        if q.applied:
            raise error.Abort(_('new queue created, but cannot make active '
                                'as patches are applied'))
        _setactivenocheck(name)

    def _setactivenocheck(name):
        fh = repo.vfs(_activequeue, 'w')
        # The default queue is represented by an empty state file.
        if name != 'patches':
            fh.write(name)
        fh.close()

    def _addqueue(name):
        fh = repo.vfs(_allqueues, 'a')
        fh.write('%s\n' % (name,))
        fh.close()

    def _queuedir(name):
        if name == 'patches':
            return repo.vfs.join('patches')
        else:
            return repo.vfs.join('patches-' + name)

    def _validname(name):
        # Queue names become directory names, so forbid path separators etc.
        for n in name:
            if n in ':\\/.':
                return False
        return True

    def _delete(name):
        if name not in existing:
            raise error.Abort(_('cannot delete queue that does not exist'))

        current = _getcurrent()

        if name == current:
            raise error.Abort(_('cannot delete currently active queue'))

        # Rewrite the registry atomically via a temp file + rename.
        fh = repo.vfs('patches.queues.new', 'w')
        for queue in existing:
            if queue == name:
                continue
            fh.write('%s\n' % (queue,))
        fh.close()
        repo.vfs.rename('patches.queues.new', _allqueues)

    opts = pycompat.byteskwargs(opts)
    if not name or opts.get('list') or opts.get('active'):
        current = _getcurrent()
        if opts.get('active'):
            ui.write('%s\n' % (current,))
            return
        for queue in _getqueues():
            ui.write('%s' % (queue,))
            if queue == current and not ui.quiet:
                ui.write(_(' (active)\n'))
            else:
                ui.write('\n')
        return

    if not _validname(name):
        raise error.Abort(
            _('invalid queue name, may not contain the characters ":\\/."'))

    with repo.wlock():
        existing = _getqueues()

        if opts.get('create'):
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)
            if _noqueues():
                _addqueue(_defaultqueue)
            _addqueue(name)
            _setactive(name)
        elif opts.get('rename'):
            current = _getcurrent()
            if name == current:
                raise error.Abort(_('can\'t rename "%s" to its current name')
                                  % name)
            if name in existing:
                raise error.Abort(_('queue "%s" already exists') % name)

            olddir = _queuedir(current)
            newdir = _queuedir(name)

            if os.path.exists(newdir):
                raise error.Abort(_('non-queue directory "%s" already exists') %
                                  newdir)

            fh = repo.vfs('patches.queues.new', 'w')
            for queue in existing:
                if queue == current:
                    fh.write('%s\n' % (name,))
                    if os.path.exists(olddir):
                        util.rename(olddir, newdir)
                else:
                    fh.write('%s\n' % (queue,))
            fh.close()
            repo.vfs.rename('patches.queues.new', _allqueues)
            _setactivenocheck(name)
        elif opts.get('delete'):
            _delete(name)
        elif opts.get('purge'):
            if name in existing:
                _delete(name)
            qdir = _queuedir(name)
            if os.path.exists(qdir):
                shutil.rmtree(qdir)
        else:
            if name not in existing:
                raise error.Abort(_('use --create to create a new queue'))
            _setactive(name)
3496
3496
def mqphasedefaults(repo, roots):
    """callback used to set mq changeset as secret when no phase data exists

    Adds the node of the first applied mq patch (qbase) to the phase-root
    set in ``roots`` — secret if mq.secret is configured, draft otherwise —
    and returns the (mutated) ``roots`` mapping.
    """
    if repo.mq.applied:
        if repo.ui.configbool('mq', 'secret'):
            mqphase = phases.secret
        else:
            mqphase = phases.draft
        # Everything from qbase up is mq-controlled, so rooting the phase at
        # qbase covers the whole applied stack.
        qbase = repo[repo.mq.applied[0].node]
        roots[mqphase].add(qbase.node())
    return roots
3507
3507
def reposetup(ui, repo):
    """Subclass the local repo so mq state is honored by core operations."""
    class mqrepo(repo.__class__):
        @localrepo.unfilteredpropertycache
        def mq(self):
            # Lazily instantiated; invalidateall() below drops it so a queue
            # switch picks up the new path.
            return queue(self.ui, self.baseui, self.path)

        def invalidateall(self):
            super(mqrepo, self).invalidateall()
            if localrepo.hasunfilteredcache(self, r'mq'):
                # recreate mq in case queue path was changed
                delattr(self.unfiltered(), r'mq')

        def abortifwdirpatched(self, errmsg, force=False):
            # Abort with errmsg when a working-dir parent is an applied mq
            # patch (unless forced or mq.check is disabled).
            if self.mq.applied and self.mq.checkapplied and not force:
                parents = self.dirstate.parents()
                patches = [s.node for s in self.mq.applied]
                if any(p in patches for p in parents):
                    raise error.Abort(errmsg)

        def commit(self, text="", user=None, date=None, match=None,
                   force=False, editor=False, extra=None):
            if extra is None:
                extra = {}
            self.abortifwdirpatched(
                _('cannot commit over an applied mq patch'),
                force)

            return super(mqrepo, self).commit(text, user, date, match, force,
                                              editor, extra)

        def checkpush(self, pushop):
            # Refuse to push applied, non-secret mq patches (unless --force).
            if self.mq.applied and self.mq.checkapplied and not pushop.force:
                outapplied = [e.node for e in self.mq.applied]
                if pushop.revs:
                    # Assume applied patches have no non-patch descendants and
                    # are not on remote already. Filtering any changeset not
                    # pushed.
                    heads = set(pushop.revs)
                    for node in reversed(outapplied):
                        if node in heads:
                            break
                        else:
                            outapplied.pop()
                # looking for pushed and shared changeset
                for node in outapplied:
                    if self[node].phase() < phases.secret:
                        raise error.Abort(_('source has mq patches applied'))
                # no non-secret patches pushed
            super(mqrepo, self).checkpush(pushop)

        def _findtags(self):
            '''augment tags from base class with patch tags'''
            result = super(mqrepo, self)._findtags()

            q = self.mq
            if not q.applied:
                return result

            mqtags = [(patch.node, patch.name) for patch in q.applied]

            try:
                # for now ignore filtering business
                self.unfiltered().changelog.rev(mqtags[-1][0])
            except error.LookupError:
                self.ui.warn(_('mq status file refers to unknown node %s\n')
                             % short(mqtags[-1][0]))
                return result

            # do not add fake tags for filtered revisions
            included = self.changelog.hasnode
            mqtags = [mqt for mqt in mqtags if included(mqt[0])]
            if not mqtags:
                return result

            # Synthesize the qtip/qbase/qparent convenience tags; real tags
            # with the same name win, with a warning.
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
            tags = result[0]
            for patch in mqtags:
                if patch[1] in tags:
                    self.ui.warn(_('tag %s overrides mq patch of the same '
                                   'name\n') % patch[1])
                else:
                    tags[patch[1]] = patch[0]

            return result

    if repo.local():
        repo.__class__ = mqrepo

        repo._phasedefaults.append(mqphasedefaults)
3599
3599
def mqimport(orig, ui, repo, *args, **kwargs):
    """wrapper for :hg:`import` that refuses to import over applied patches

    Only guards repos that were wrapped by reposetup() (hence the
    safehasattr check), and only when the import will actually commit.
    """
    if (util.safehasattr(repo, 'abortifwdirpatched')
        and not kwargs.get(r'no_commit', False)):
        repo.abortifwdirpatched(_('cannot import over an applied patch'),
                                kwargs.get(r'force'))
    return orig(ui, repo, *args, **kwargs)
3606
3606
def mqinit(orig, ui, *args, **kwargs):
    """wrapper for :hg:`init` adding --mq to initialize the patch repository

    Without --mq, defers entirely to the wrapped command. With --mq,
    locates the enclosing repository and runs qinit on it instead.
    """
    mq = kwargs.pop(r'mq', None)

    if not mq:
        return orig(ui, *args, **kwargs)

    if args:
        repopath = args[0]
        if not hg.islocal(repopath):
            raise error.Abort(_('only a local queue repository '
                                'may be initialized'))
    else:
        repopath = cmdutil.findrepo(encoding.getcwd())
        if not repopath:
            raise error.Abort(_('there is no Mercurial repository here '
                                '(.hg not found)'))
    repo = hg.repository(ui, repopath)
    return qinit(ui, repo, True)
3625
3625
def mqcommand(orig, ui, repo, *args, **kwargs):
    """Add --mq option to operate on patch repository instead of main"""

    # some commands do not like getting unknown options
    mq = kwargs.pop(r'mq', None)

    if not mq:
        return orig(ui, repo, *args, **kwargs)

    q = repo.mq
    r = q.qrepo()
    if not r:
        raise error.Abort(_('no queue repository'))
    # Re-target the wrapped command at the queue repo and its ui.
    return orig(r.ui, r, *args, **kwargs)
3640
3640
def summaryhook(ui, repo):
    """hook for :hg:`summary` printing applied/unapplied mq patch counts"""
    q = repo.mq
    m = []
    a, u = len(q.applied), len(q.unapplied(repo))
    if a:
        m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
    if u:
        m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
    if m:
        # i18n: column positioning for "hg summary"
        ui.write(_("mq: %s\n") % ', '.join(m))
    else:
        # i18n: column positioning for "hg summary"
        ui.note(_("mq: (empty queue)\n"))
3655
3655
revsetpredicate = registrar.revsetpredicate()

@revsetpredicate('mq()')
def revsetmq(repo, subset, x):
    """Changesets managed by MQ.
    """
    revsetlang.getargs(x, 0, 0, _("mq takes no arguments"))
    # Set comprehension instead of set([...]) — same behavior, no
    # intermediate list.
    applied = {repo[r.node].rev() for r in repo.mq.applied}
    return smartset.baseset([r for r in subset if r in applied])

# tell hggettext to extract docstrings from these functions:
i18nfunctions = [revsetmq]
3668
3668
def extsetup(ui):
    """wrap core and extension commands so mq checks run first

    Adds the --mq flag to init and to every repo-using command (core and
    other extensions), and guards :hg:`import` against applied patches.
    """
    # Ensure mq wrappers are called first, regardless of extension load order by
    # NOT wrapping in uisetup() and instead deferring to init stage two here.
    mqopt = [('', 'mq', None, _("operate on patch repository"))]

    extensions.wrapcommand(commands.table, 'import', mqimport)
    cmdutil.summaryhooks.add('mq', summaryhook)

    entry = extensions.wrapcommand(commands.table, 'init', mqinit)
    entry[1].extend(mqopt)

    def dotable(cmdtable):
        for cmd, entry in cmdtable.iteritems():
            cmd = cmdutil.parsealiases(cmd)[0]
            func = entry[0]
            if func.norepo:
                # --mq makes no sense for commands that take no repo.
                continue
            entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
            entry[1].extend(mqopt)

    dotable(commands.table)

    for extname, extmodule in extensions.extensions():
        if extmodule.__file__ != __file__:
            dotable(getattr(extmodule, 'cmdtable', {}))
3694
3694
# Color/effect definitions for mq output labels, picked up by the color
# extension.
colortable = {'qguard.negative': 'red',
              'qguard.positive': 'yellow',
              'qguard.unguarded': 'green',
              'qseries.applied': 'blue bold underline',
              'qseries.guarded': 'black bold',
              'qseries.missing': 'red bold',
              'qseries.unapplied': 'black bold'}
@@ -1,1012 +1,1012
1 # phabricator.py - simple Phabricator integration
1 # phabricator.py - simple Phabricator integration
2 #
2 #
3 # Copyright 2017 Facebook, Inc.
3 # Copyright 2017 Facebook, Inc.
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 """simple Phabricator integration (EXPERIMENTAL)
7 """simple Phabricator integration (EXPERIMENTAL)
8
8
9 This extension provides a ``phabsend`` command which sends a stack of
9 This extension provides a ``phabsend`` command which sends a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
10 changesets to Phabricator, and a ``phabread`` command which prints a stack of
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
11 revisions in a format suitable for :hg:`import`, and a ``phabupdate`` command
12 to update statuses in batch.
12 to update statuses in batch.
13
13
14 By default, Phabricator requires ``Test Plan`` which might prevent some
14 By default, Phabricator requires ``Test Plan`` which might prevent some
15 changeset from being sent. The requirement could be disabled by changing
15 changeset from being sent. The requirement could be disabled by changing
16 ``differential.require-test-plan-field`` config server side.
16 ``differential.require-test-plan-field`` config server side.
17
17
18 Config::
18 Config::
19
19
20 [phabricator]
20 [phabricator]
21 # Phabricator URL
21 # Phabricator URL
22 url = https://phab.example.com/
22 url = https://phab.example.com/
23
23
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
24 # Repo callsign. If a repo has a URL https://$HOST/diffusion/FOO, then its
25 # callsign is "FOO".
25 # callsign is "FOO".
26 callsign = FOO
26 callsign = FOO
27
27
28 # curl command to use. If not set (default), use builtin HTTP library to
28 # curl command to use. If not set (default), use builtin HTTP library to
29 # communicate. If set, use the specified curl command. This could be useful
29 # communicate. If set, use the specified curl command. This could be useful
30 # if you need to specify advanced options that is not easily supported by
30 # if you need to specify advanced options that is not easily supported by
31 # the internal library.
31 # the internal library.
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
32 curlcmd = curl --connect-timeout 2 --retry 3 --silent
33
33
34 [auth]
34 [auth]
35 example.schemes = https
35 example.schemes = https
36 example.prefix = phab.example.com
36 example.prefix = phab.example.com
37
37
38 # API token. Get it from https://$HOST/conduit/login/
38 # API token. Get it from https://$HOST/conduit/login/
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
39 example.phabtoken = cli-xxxxxxxxxxxxxxxxxxxxxxxxxxxx
40 """
40 """
41
41
42 from __future__ import absolute_import
42 from __future__ import absolute_import
43
43
44 import contextlib
44 import contextlib
45 import itertools
45 import itertools
46 import json
46 import json
47 import operator
47 import operator
48 import re
48 import re
49
49
50 from mercurial.node import bin, nullid
50 from mercurial.node import bin, nullid
51 from mercurial.i18n import _
51 from mercurial.i18n import _
52 from mercurial import (
52 from mercurial import (
53 cmdutil,
53 cmdutil,
54 context,
54 context,
55 encoding,
55 encoding,
56 error,
56 error,
57 httpconnection as httpconnectionmod,
57 httpconnection as httpconnectionmod,
58 mdiff,
58 mdiff,
59 obsutil,
59 obsutil,
60 parser,
60 parser,
61 patch,
61 patch,
62 phases,
62 phases,
63 registrar,
63 registrar,
64 scmutil,
64 scmutil,
65 smartset,
65 smartset,
66 tags,
66 tags,
67 templateutil,
67 templateutil,
68 url as urlmod,
68 url as urlmod,
69 util,
69 util,
70 )
70 )
71 from mercurial.utils import (
71 from mercurial.utils import (
72 procutil,
72 procutil,
73 stringutil,
73 stringutil,
74 )
74 )
75
75
76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
76 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
77 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
78 # be specifying the version(s) of Mercurial they are tested with, or
78 # be specifying the version(s) of Mercurial they are tested with, or
79 # leave the attribute unspecified.
79 # leave the attribute unspecified.
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'

cmdtable = {}
command = registrar.command(cmdtable)

configtable = {}
configitem = registrar.configitem(configtable)

# developer config: phabricator.batchsize
configitem(b'phabricator', b'batchsize',
           default=12,
)
configitem(b'phabricator', b'callsign',
           default=None,
)
configitem(b'phabricator', b'curlcmd',
           default=None,
)
# developer config: phabricator.repophid
configitem(b'phabricator', b'repophid',
           default=None,
)
configitem(b'phabricator', b'url',
           default=None,
)
configitem(b'phabsend', b'confirm',
           default=False,
)

# Color/effect definitions for phabsend/phabread output labels.
colortable = {
    b'phabricator.action.created': b'green',
    b'phabricator.action.skipped': b'magenta',
    b'phabricator.action.updated': b'magenta',
    b'phabricator.desc': b'',
    b'phabricator.drev': b'bold',
    b'phabricator.node': b'',
}

# Extra flags appended to every vcrcommand()-registered command, allowing
# tests to record/replay HTTP traffic.
_VCR_FLAGS = [
    (b'', b'test-vcr', b'',
     _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript'
       b', otherwise will mock all http requests using the specified vcr file.'
       b' (ADVANCED)'
      )),
]
125
125
def vcrcommand(name, flags, spec, helpcategory=None):
    """like @command, but with optional HTTP record/replay via --test-vcr

    Registers ``fn`` as a command with ``flags`` plus _VCR_FLAGS. When the
    command is invoked with --test-vcr, all HTTP(S) connections made through
    urlmod are recorded to (or replayed from) the given cassette file using
    the ``vcr`` package; otherwise the command runs unchanged.
    """
    fullflags = flags + _VCR_FLAGS
    def decorate(fn):
        def inner(*args, **kwargs):
            cassette = kwargs.pop(r'test_vcr', None)
            if cassette:
                # vcr is a test-only third-party import; keep it out of the
                # demandimport machinery and the normal code path.
                import hgdemandimport
                with hgdemandimport.deactivated():
                    import vcr as vcrmod
                    import vcr.stubs as stubs
                    vcr = vcrmod.VCR(
                        serializer=r'json',
                        custom_patches=[
                            (urlmod, 'httpconnection', stubs.VCRHTTPConnection),
                            (urlmod, 'httpsconnection',
                             stubs.VCRHTTPSConnection),
                        ])
                    with vcr.use_cassette(cassette):
                        return fn(*args, **kwargs)
            return fn(*args, **kwargs)
        # Preserve the wrapped function's identity for help/doc extraction.
        inner.__name__ = fn.__name__
        inner.__doc__ = fn.__doc__
        return command(name, fullflags, spec, helpcategory=helpcategory)(inner)
    return decorate
150
150
def urlencodenested(params):
    """like urlencode, but works with nested parameters.

    For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be
    flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to
    urlencode. Note: the encoding is consistent with PHP's http_build_query.
    """
    flatparams = util.sortdict()
    def flatten(prefix, value):
        if isinstance(value, bool):
            # Python -> PHP form
            value = b'true' if value else b'false'
        # NOTE: exact type dispatch on purpose — subclasses (e.g. sortdict)
        # are treated as opaque scalars, matching PHP semantics.
        kind = type(value)
        if kind is list:
            pairs = enumerate(value)
        elif kind is dict:
            pairs = value.items()
        else:
            flatparams[prefix] = value
            return
        for key, subvalue in pairs:
            flatten(b'%s[%s]' % (prefix, key) if prefix else key, subvalue)
    flatten(b'', params)
    return util.urlreq.urlencode(flatparams)
173
173
def readurltoken(repo):
    """return conduit url, token and make sure they exist

    Currently read from [auth] config section. In the future, it might
    make sense to read from .arcconfig and .arcrc as well.
    """
    ui = repo.ui
    url = ui.config(b'phabricator', b'url')
    if not url:
        raise error.Abort(_(b'config %s.%s is required')
                          % (b'phabricator', b'url'))

    token = None
    res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user)
    if res:
        group, auth = res
        ui.debug(b"using auth.%s.* for authentication\n" % group)
        token = auth.get(b'phabtoken')

    if not token:
        raise error.Abort(_(b'Can\'t find conduit token associated to %s')
                          % (url,))

    return url, token
200
200
def callconduit(repo, name, params):
    """call Conduit API, params is a dict. return json.loads result, or None"""
    host, token = readurltoken(repo)
    url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo()
    repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params))
    params = params.copy()
    params[b'api.token'] = token
    data = urlencodenested(params)
    curlcmd = repo.ui.config(b'phabricator', b'curlcmd')
    if curlcmd:
        # Shell out to curl when configured (e.g. for exotic auth setups).
        stdin, stdout = procutil.popen2(b'%s -d @- %s'
                                        % (curlcmd, procutil.shellquote(url)))
        stdin.write(data)
        stdin.close()
        body = stdout.read()
    else:
        urlopener = urlmod.opener(repo.ui, authinfo)
        request = util.urlreq.request(url, data=data)
        with contextlib.closing(urlopener.open(request)) as rsp:
            body = rsp.read()
    repo.ui.debug(b'Conduit Response: %s\n' % body)
    parsed = json.loads(body)
    if parsed.get(r'error_code'):
        raise error.Abort(_(b'Conduit Error (%s): %s')
                          % (parsed[r'error_code'], parsed[r'error_info']))
    return parsed[r'result']
228
228
@vcrcommand(b'debugcallconduit', [], _(b'METHOD'))
def debugcallconduit(ui, repo, name):
    """call Conduit API

    Call parameters are read from stdin as a JSON blob. Result will be written
    to stdout as a JSON blob.
    """
    params = json.loads(ui.fin.read())
    result = callconduit(repo, name, params)
    out = json.dumps(result, sort_keys=True, indent=2,
                     separators=(b',', b': '))
    ui.write(b'%s\n' % out)
240
240
def getrepophid(repo):
    """given callsign, return repository PHID or None"""
    ui = repo.ui
    # developer config: phabricator.repophid
    repophid = ui.config(b'phabricator', b'repophid')
    if repophid:
        return repophid
    callsign = ui.config(b'phabricator', b'callsign')
    if not callsign:
        return None
    query = callconduit(repo, b'diffusion.repository.search',
                        {b'constraints': {b'callsigns': [callsign]}})
    if not query[r'data']:
        return None
    repophid = encoding.strtolocal(query[r'data'][0][r'phid'])
    # Cache the lookup so later calls skip the conduit round-trip.
    ui.setconfig(b'phabricator', b'repophid', repophid)
    return repophid
257
257
# Raw byte strings: these patterns contain regex escapes (\A, \Z, \s) that
# are invalid escape sequences in a non-raw literal (a DeprecationWarning
# in Python 3.6+, an error in later versions).
# Matches a whole local tag of the form "D<drev number>", e.g. b'D1234'.
_differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z')
# Matches a "Differential Revision: <url ending in D<id>>" commit-message
# line; groups: 'url' = full URL, 'id' = the numeric revision id.
_differentialrevisiondescre = re.compile(
    br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M)
261
261
def getoldnodedrevmap(repo, nodelist):
    """find previous nodes that has been sent to Phabricator

    return {node: (oldnode, Differential diff, Differential Revision ID)}
    for node in nodelist with known previous sent versions, or associated
    Differential Revision IDs. ``oldnode`` and ``Differential diff`` could
    be ``None``.

    Examines commit messages like "Differential Revision:" to get the
    association information.

    If such commit message line is not found, examines all precursors and their
    tags. Tags with format like "D1234" are considered a match and the node
    with that tag, and the number after "D" (ex. 1234) will be returned.

    The ``old node``, if not None, is guaranteed to be the last diff of
    corresponding Differential Revision, and exist in the repo.
    """
    unfi = repo.unfiltered()
    nodemap = unfi.changelog.nodemap

    result = {}     # {node: (oldnode?, lastdiff?, drev)}
    toconfirm = {}  # {node: (force, {precnode}, drev)}
    for node in nodelist:
        ctx = unfi[node]
        # Tags like "D123" on predecessors are candidate associations; they
        # are verified against Phabricator below before being trusted.
        precnodes = list(obsutil.allpredecessors(unfi.obsstore, [node]))
        for pnode in precnodes:
            if pnode not in nodemap:
                continue
            for tag in unfi.nodetags(pnode):
                tagmatch = _differentialrevisiontagre.match(tag)
                if tagmatch:
                    toconfirm[node] = (0, set(precnodes),
                                       int(tagmatch.group(1)))
                    continue

        # A "Differential Revision:" commit-message line wins over tags and
        # is taken at face value (force=1).
        descmatch = _differentialrevisiondescre.search(ctx.description())
        if descmatch:
            toconfirm[node] = (1, set(precnodes),
                               int(descmatch.group(b'id')))

    # Double check if tags are genuine by collecting all old nodes from
    # Phabricator, and expect precursors overlap with it.
    if toconfirm:
        drevs = [drev for force, precs, drev in toconfirm.values()]
        alldiffs = callconduit(unfi, b'differential.querydiffs',
                               {b'revisionIDs': drevs})
        getnode = lambda d: bin(encoding.unitolocal(
            getdiffmeta(d).get(r'node', b''))) or None
        for newnode, (force, precset, drev) in toconfirm.items():
            diffs = [d for d in alldiffs.values()
                     if int(d[r'revisionID']) == drev]

            # "precursors" as known by Phabricator
            phprecset = set(getnode(d) for d in diffs)

            # Ignore if precursors (Phabricator and local repo) do not overlap,
            # and force is not set (when commit message says nothing)
            if not force and not bool(phprecset & precset):
                tagname = b'D%d' % drev
                tags.tag(repo, tagname, nullid, message=None, user=None,
                         date=None, local=True)
                unfi.ui.warn(_(b'D%s: local tag removed - does not match '
                               b'Differential history\n') % drev)
                continue

            # Find the last node using Phabricator metadata, and make sure it
            # exists in the repo
            oldnode = lastdiff = None
            if diffs:
                lastdiff = max(diffs, key=lambda d: int(d[r'id']))
                oldnode = getnode(lastdiff)
                if oldnode and oldnode not in nodemap:
                    oldnode = None

            result[newnode] = (oldnode, lastdiff, drev)

    return result
339
339
def getdiff(ctx, diffopts):
    """plain-text diff without header (user, commit message, etc)"""
    buf = util.stringio()
    chunks = patch.diffui(ctx.repo(), ctx.p1().node(), ctx.node(),
                          None, opts=diffopts)
    for chunk, _label in chunks:
        buf.write(chunk)
    return buf.getvalue()
347
347
def creatediff(ctx):
    """create a Differential Diff"""
    repo = ctx.repo()
    # Create a "Differential Diff" via "differential.createrawdiff" API
    params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))}
    repophid = getrepophid(repo)
    if repophid:
        params[b'repositoryPHID'] = repophid
    diff = callconduit(repo, b'differential.createrawdiff', params)
    if not diff:
        raise error.Abort(_(b'cannot create diff for %s') % ctx)
    return diff
360
360
def writediffproperties(ctx, diff):
    """write metadata to diff so patches could be applied losslessly"""
    repo = ctx.repo()

    # "hg:meta": enough commit metadata to reconstruct the changeset.
    hgmeta = json.dumps({
        b'user': ctx.user(),
        b'date': b'%d %d' % ctx.date(),
        b'node': ctx.hex(),
        b'parent': ctx.p1().hex(),
    })
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diff[r'id'],
        b'name': b'hg:meta',
        b'data': hgmeta,
    })

    # "local:commits": per-node author/time info, keyed by hex node.
    localcommits = json.dumps({
        ctx.hex(): {
            b'author': stringutil.person(ctx.user()),
            b'authorEmail': stringutil.email(ctx.user()),
            b'time': ctx.date()[0],
        },
    })
    callconduit(repo, b'differential.setdiffproperty', {
        b'diff_id': diff[r'id'],
        b'name': b'local:commits',
        b'data': localcommits,
    })
387
387
def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None,
                               olddiff=None, actions=None):
    """create or update a Differential Revision

    If revid is None, create a new Differential Revision, otherwise update
    revid. If parentrevid is not None, set it as a dependency.

    If oldnode is not None, check if the patch content (without commit message
    and metadata) has changed before creating another diff.

    If actions is not None, they will be appended to the transaction.
    """
    repo = ctx.repo()
    if oldnode:
        # Only upload a new diff when the patch content actually changed.
        diffopts = mdiff.diffopts(git=True, context=32767)
        oldctx = repo.unfiltered()[oldnode]
        neednewdiff = getdiff(ctx, diffopts) != getdiff(oldctx, diffopts)
    else:
        neednewdiff = True

    txns = []
    if neednewdiff:
        diff = creatediff(ctx)
        txns.append({b'type': b'update', b'value': diff[r'phid']})
    else:
        # Even if we don't need to upload a new diff because the patch content
        # does not change. We might still need to update its metadata so
        # pushers could know the correct node metadata.
        assert olddiff
        diff = olddiff
        writediffproperties(ctx, diff)

    # Use a temporary summary to set dependency. There might be better ways but
    # I cannot find them for now. But do not do that if we are updating an
    # existing revision (revid is not None) since that introduces visible
    # churns (someone edited "Summary" twice) on the web page.
    if parentrevid and revid is None:
        summary = b'Depends on D%s' % parentrevid
        txns.extend([{b'type': b'summary', b'value': summary},
                     {b'type': b'summary', b'value': b' '}])

    if actions:
        txns.extend(actions)

    # Parse commit message and update related fields.
    desc = ctx.description()
    info = callconduit(repo, b'differential.parsecommitmessage',
                       {b'corpus': desc})
    for field, value in info[r'fields'].items():
        if field in (b'title', b'summary', b'testPlan'):
            txns.append({b'type': field, b'value': value})

    params = {b'transactions': txns}
    if revid is not None:
        # Update an existing Differential Revision
        params[b'objectIdentifier'] = revid

    revision = callconduit(repo, b'differential.revision.edit', params)
    if not revision:
        raise error.Abort(_(b'cannot create revision for %s') % ctx)

    return revision, diff
450
450
def userphids(repo, names):
    """convert user names to PHIDs"""
    result = callconduit(repo, b'user.search',
                         {b'constraints': {b'usernames': names}})
    # username not found is not an error of the API. So check if we have missed
    # some names here.
    data = result[r'data']
    resolved = set(entry[r'fields'][r'username'] for entry in data)
    missing = set(names) - resolved
    if missing:
        raise error.Abort(_(b'unknown username: %s')
                          % b' '.join(sorted(missing)))
    return [entry[r'phid'] for entry in data]
464
464
465 @vcrcommand(b'phabsend',
465 @vcrcommand(b'phabsend',
466 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
466 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')),
467 (b'', b'amend', True, _(b'update commit messages')),
467 (b'', b'amend', True, _(b'update commit messages')),
468 (b'', b'reviewer', [], _(b'specify reviewers')),
468 (b'', b'reviewer', [], _(b'specify reviewers')),
469 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
469 (b'', b'confirm', None, _(b'ask for confirmation before sending'))],
470 _(b'REV [OPTIONS]'),
470 _(b'REV [OPTIONS]'),
471 helpcategory=command.CATEGORY_IMPORT_EXPORT)
471 helpcategory=command.CATEGORY_IMPORT_EXPORT)
472 def phabsend(ui, repo, *revs, **opts):
472 def phabsend(ui, repo, *revs, **opts):
473 """upload changesets to Phabricator
473 """upload changesets to Phabricator
474
474
475 If there are multiple revisions specified, they will be send as a stack
475 If there are multiple revisions specified, they will be send as a stack
476 with a linear dependencies relationship using the order specified by the
476 with a linear dependencies relationship using the order specified by the
477 revset.
477 revset.
478
478
479 For the first time uploading changesets, local tags will be created to
479 For the first time uploading changesets, local tags will be created to
480 maintain the association. After the first time, phabsend will check
480 maintain the association. After the first time, phabsend will check
481 obsstore and tags information so it can figure out whether to update an
481 obsstore and tags information so it can figure out whether to update an
482 existing Differential Revision, or create a new one.
482 existing Differential Revision, or create a new one.
483
483
484 If --amend is set, update commit messages so they have the
484 If --amend is set, update commit messages so they have the
485 ``Differential Revision`` URL, remove related tags. This is similar to what
485 ``Differential Revision`` URL, remove related tags. This is similar to what
486 arcanist will do, and is more desired in author-push workflows. Otherwise,
486 arcanist will do, and is more desired in author-push workflows. Otherwise,
487 use local tags to record the ``Differential Revision`` association.
487 use local tags to record the ``Differential Revision`` association.
488
488
489 The --confirm option lets you confirm changesets before sending them. You
489 The --confirm option lets you confirm changesets before sending them. You
490 can also add following to your configuration file to make it default
490 can also add following to your configuration file to make it default
491 behaviour::
491 behaviour::
492
492
493 [phabsend]
493 [phabsend]
494 confirm = true
494 confirm = true
495
495
496 phabsend will check obsstore and the above association to decide whether to
496 phabsend will check obsstore and the above association to decide whether to
497 update an existing Differential Revision, or create a new one.
497 update an existing Differential Revision, or create a new one.
498 """
498 """
499 revs = list(revs) + opts.get(b'rev', [])
499 revs = list(revs) + opts.get(b'rev', [])
500 revs = scmutil.revrange(repo, revs)
500 revs = scmutil.revrange(repo, revs)
501
501
502 if not revs:
502 if not revs:
503 raise error.Abort(_(b'phabsend requires at least one changeset'))
503 raise error.Abort(_(b'phabsend requires at least one changeset'))
504 if opts.get(b'amend'):
504 if opts.get(b'amend'):
505 cmdutil.checkunfinished(repo)
505 cmdutil.checkunfinished(repo)
506
506
507 # {newnode: (oldnode, olddiff, olddrev}
507 # {newnode: (oldnode, olddiff, olddrev}
508 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
508 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs])
509
509
510 confirm = ui.configbool(b'phabsend', b'confirm')
510 confirm = ui.configbool(b'phabsend', b'confirm')
511 confirm |= bool(opts.get(b'confirm'))
511 confirm |= bool(opts.get(b'confirm'))
512 if confirm:
512 if confirm:
513 confirmed = _confirmbeforesend(repo, revs, oldmap)
513 confirmed = _confirmbeforesend(repo, revs, oldmap)
514 if not confirmed:
514 if not confirmed:
515 raise error.Abort(_(b'phabsend cancelled'))
515 raise error.Abort(_(b'phabsend cancelled'))
516
516
517 actions = []
517 actions = []
518 reviewers = opts.get(b'reviewer', [])
518 reviewers = opts.get(b'reviewer', [])
519 if reviewers:
519 if reviewers:
520 phids = userphids(repo, reviewers)
520 phids = userphids(repo, reviewers)
521 actions.append({b'type': b'reviewers.add', b'value': phids})
521 actions.append({b'type': b'reviewers.add', b'value': phids})
522
522
523 drevids = [] # [int]
523 drevids = [] # [int]
524 diffmap = {} # {newnode: diff}
524 diffmap = {} # {newnode: diff}
525
525
526 # Send patches one by one so we know their Differential Revision IDs and
526 # Send patches one by one so we know their Differential Revision IDs and
527 # can provide dependency relationship
527 # can provide dependency relationship
528 lastrevid = None
528 lastrevid = None
529 for rev in revs:
529 for rev in revs:
530 ui.debug(b'sending rev %d\n' % rev)
530 ui.debug(b'sending rev %d\n' % rev)
531 ctx = repo[rev]
531 ctx = repo[rev]
532
532
533 # Get Differential Revision ID
533 # Get Differential Revision ID
534 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
534 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None))
535 if oldnode != ctx.node() or opts.get(b'amend'):
535 if oldnode != ctx.node() or opts.get(b'amend'):
536 # Create or update Differential Revision
536 # Create or update Differential Revision
537 revision, diff = createdifferentialrevision(
537 revision, diff = createdifferentialrevision(
538 ctx, revid, lastrevid, oldnode, olddiff, actions)
538 ctx, revid, lastrevid, oldnode, olddiff, actions)
539 diffmap[ctx.node()] = diff
539 diffmap[ctx.node()] = diff
540 newrevid = int(revision[r'object'][r'id'])
540 newrevid = int(revision[r'object'][r'id'])
541 if revid:
541 if revid:
542 action = b'updated'
542 action = b'updated'
543 else:
543 else:
544 action = b'created'
544 action = b'created'
545
545
546 # Create a local tag to note the association, if commit message
546 # Create a local tag to note the association, if commit message
547 # does not have it already
547 # does not have it already
548 m = _differentialrevisiondescre.search(ctx.description())
548 m = _differentialrevisiondescre.search(ctx.description())
549 if not m or int(m.group(b'id')) != newrevid:
549 if not m or int(m.group(b'id')) != newrevid:
550 tagname = b'D%d' % newrevid
550 tagname = b'D%d' % newrevid
551 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
551 tags.tag(repo, tagname, ctx.node(), message=None, user=None,
552 date=None, local=True)
552 date=None, local=True)
553 else:
553 else:
554 # Nothing changed. But still set "newrevid" so the next revision
554 # Nothing changed. But still set "newrevid" so the next revision
555 # could depend on this one.
555 # could depend on this one.
556 newrevid = revid
556 newrevid = revid
557 action = b'skipped'
557 action = b'skipped'
558
558
559 actiondesc = ui.label(
559 actiondesc = ui.label(
560 {b'created': _(b'created'),
560 {b'created': _(b'created'),
561 b'skipped': _(b'skipped'),
561 b'skipped': _(b'skipped'),
562 b'updated': _(b'updated')}[action],
562 b'updated': _(b'updated')}[action],
563 b'phabricator.action.%s' % action)
563 b'phabricator.action.%s' % action)
564 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
564 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev')
565 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
565 nodedesc = ui.label(bytes(ctx), b'phabricator.node')
566 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
566 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc')
567 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
567 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc,
568 desc))
568 desc))
569 drevids.append(newrevid)
569 drevids.append(newrevid)
570 lastrevid = newrevid
570 lastrevid = newrevid
571
571
572 # Update commit messages and remove tags
572 # Update commit messages and remove tags
573 if opts.get(b'amend'):
573 if opts.get(b'amend'):
574 unfi = repo.unfiltered()
574 unfi = repo.unfiltered()
575 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
575 drevs = callconduit(repo, b'differential.query', {b'ids': drevids})
576 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
576 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'):
577 wnode = unfi[b'.'].node()
577 wnode = unfi[b'.'].node()
578 mapping = {} # {oldnode: [newnode]}
578 mapping = {} # {oldnode: [newnode]}
579 for i, rev in enumerate(revs):
579 for i, rev in enumerate(revs):
580 old = unfi[rev]
580 old = unfi[rev]
581 drevid = drevids[i]
581 drevid = drevids[i]
582 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
582 drev = [d for d in drevs if int(d[r'id']) == drevid][0]
583 newdesc = getdescfromdrev(drev)
583 newdesc = getdescfromdrev(drev)
584 newdesc = encoding.unitolocal(newdesc)
584 newdesc = encoding.unitolocal(newdesc)
585 # Make sure commit message contain "Differential Revision"
585 # Make sure commit message contain "Differential Revision"
586 if old.description() != newdesc:
586 if old.description() != newdesc:
587 if old.phase() == phases.public:
587 if old.phase() == phases.public:
588 ui.warn(_("warning: not updating public commit %s\n")
588 ui.warn(_("warning: not updating public commit %s\n")
589 % scmutil.formatchangeid(old))
589 % scmutil.formatchangeid(old))
590 continue
590 continue
591 parents = [
591 parents = [
592 mapping.get(old.p1().node(), (old.p1(),))[0],
592 mapping.get(old.p1().node(), (old.p1(),))[0],
593 mapping.get(old.p2().node(), (old.p2(),))[0],
593 mapping.get(old.p2().node(), (old.p2(),))[0],
594 ]
594 ]
595 new = context.metadataonlyctx(
595 new = context.metadataonlyctx(
596 repo, old, parents=parents, text=newdesc,
596 repo, old, parents=parents, text=newdesc,
597 user=old.user(), date=old.date(), extra=old.extra())
597 user=old.user(), date=old.date(), extra=old.extra())
598
598
599 newnode = new.commit()
599 newnode = new.commit()
600
600
601 mapping[old.node()] = [newnode]
601 mapping[old.node()] = [newnode]
602 # Update diff property
602 # Update diff property
603 writediffproperties(unfi[newnode], diffmap[old.node()])
603 writediffproperties(unfi[newnode], diffmap[old.node()])
604 # Remove local tags since it's no longer necessary
604 # Remove local tags since it's no longer necessary
605 tagname = b'D%d' % drevid
605 tagname = b'D%d' % drevid
606 if tagname in repo.tags():
606 if tagname in repo.tags():
607 tags.tag(repo, tagname, nullid, message=None, user=None,
607 tags.tag(repo, tagname, nullid, message=None, user=None,
608 date=None, local=True)
608 date=None, local=True)
609 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
609 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True)
610 if wnode in mapping:
610 if wnode in mapping:
611 unfi.setparents(mapping[wnode][0])
611 unfi.setparents(mapping[wnode][0])
612
612
613 # Map from "hg:meta" keys to header understood by "hg import". The order is
613 # Map from "hg:meta" keys to header understood by "hg import". The order is
614 # consistent with "hg export" output.
614 # consistent with "hg export" output.
615 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
615 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'),
616 (r'node', b'Node ID'), (r'parent', b'Parent ')])
616 (r'node', b'Node ID'), (r'parent', b'Parent ')])
617
617
618 def _confirmbeforesend(repo, revs, oldmap):
618 def _confirmbeforesend(repo, revs, oldmap):
619 url, token = readurltoken(repo)
619 url, token = readurltoken(repo)
620 ui = repo.ui
620 ui = repo.ui
621 for rev in revs:
621 for rev in revs:
622 ctx = repo[rev]
622 ctx = repo[rev]
623 desc = ctx.description().splitlines()[0]
623 desc = ctx.description().splitlines()[0]
624 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
624 oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
625 if drevid:
625 if drevid:
626 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
626 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev')
627 else:
627 else:
628 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
628 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev')
629
629
630 ui.write(_(b'%s - %s: %s\n')
630 ui.write(_(b'%s - %s: %s\n')
631 % (drevdesc,
631 % (drevdesc,
632 ui.label(bytes(ctx), b'phabricator.node'),
632 ui.label(bytes(ctx), b'phabricator.node'),
633 ui.label(desc, b'phabricator.desc')))
633 ui.label(desc, b'phabricator.desc')))
634
634
635 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
635 if ui.promptchoice(_(b'Send the above changes to %s (yn)?'
636 b'$$ &Yes $$ &No') % url):
636 b'$$ &Yes $$ &No') % url):
637 return False
637 return False
638
638
639 return True
639 return True
640
640
641 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
641 _knownstatusnames = {b'accepted', b'needsreview', b'needsrevision', b'closed',
642 b'abandoned'}
642 b'abandoned'}
643
643
644 def _getstatusname(drev):
644 def _getstatusname(drev):
645 """get normalized status name from a Differential Revision"""
645 """get normalized status name from a Differential Revision"""
646 return drev[r'statusName'].replace(b' ', b'').lower()
646 return drev[r'statusName'].replace(b' ', b'').lower()
647
647
648 # Small language to specify differential revisions. Support symbols: (), :X,
648 # Small language to specify differential revisions. Support symbols: (), :X,
649 # +, and -.
649 # +, and -.
650
650
651 _elements = {
651 _elements = {
652 # token-type: binding-strength, primary, prefix, infix, suffix
652 # token-type: binding-strength, primary, prefix, infix, suffix
653 b'(': (12, None, (b'group', 1, b')'), None, None),
653 b'(': (12, None, (b'group', 1, b')'), None, None),
654 b':': (8, None, (b'ancestors', 8), None, None),
654 b':': (8, None, (b'ancestors', 8), None, None),
655 b'&': (5, None, None, (b'and_', 5), None),
655 b'&': (5, None, None, (b'and_', 5), None),
656 b'+': (4, None, None, (b'add', 4), None),
656 b'+': (4, None, None, (b'add', 4), None),
657 b'-': (4, None, None, (b'sub', 4), None),
657 b'-': (4, None, None, (b'sub', 4), None),
658 b')': (0, None, None, None, None),
658 b')': (0, None, None, None, None),
659 b'symbol': (0, b'symbol', None, None, None),
659 b'symbol': (0, b'symbol', None, None, None),
660 b'end': (0, None, None, None, None),
660 b'end': (0, None, None, None, None),
661 }
661 }
662
662
663 def _tokenize(text):
663 def _tokenize(text):
664 view = memoryview(text) # zero-copy slice
664 view = memoryview(text) # zero-copy slice
665 special = b'():+-& '
665 special = b'():+-& '
666 pos = 0
666 pos = 0
667 length = len(text)
667 length = len(text)
668 while pos < length:
668 while pos < length:
669 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
669 symbol = b''.join(itertools.takewhile(lambda ch: ch not in special,
670 view[pos:]))
670 view[pos:]))
671 if symbol:
671 if symbol:
672 yield (b'symbol', symbol, pos)
672 yield (b'symbol', symbol, pos)
673 pos += len(symbol)
673 pos += len(symbol)
674 else: # special char, ignore space
674 else: # special char, ignore space
675 if text[pos] != b' ':
675 if text[pos] != b' ':
676 yield (text[pos], None, pos)
676 yield (text[pos], None, pos)
677 pos += 1
677 pos += 1
678 yield (b'end', None, pos)
678 yield (b'end', None, pos)
679
679
680 def _parse(text):
680 def _parse(text):
681 tree, pos = parser.parser(_elements).parse(_tokenize(text))
681 tree, pos = parser.parser(_elements).parse(_tokenize(text))
682 if pos != len(text):
682 if pos != len(text):
683 raise error.ParseError(b'invalid token', pos)
683 raise error.ParseError(b'invalid token', pos)
684 return tree
684 return tree
685
685
686 def _parsedrev(symbol):
686 def _parsedrev(symbol):
687 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
687 """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
688 if symbol.startswith(b'D') and symbol[1:].isdigit():
688 if symbol.startswith(b'D') and symbol[1:].isdigit():
689 return int(symbol[1:])
689 return int(symbol[1:])
690 if symbol.isdigit():
690 if symbol.isdigit():
691 return int(symbol)
691 return int(symbol)
692
692
693 def _prefetchdrevs(tree):
693 def _prefetchdrevs(tree):
694 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
694 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch"""
695 drevs = set()
695 drevs = set()
696 ancestordrevs = set()
696 ancestordrevs = set()
697 op = tree[0]
697 op = tree[0]
698 if op == b'symbol':
698 if op == b'symbol':
699 r = _parsedrev(tree[1])
699 r = _parsedrev(tree[1])
700 if r:
700 if r:
701 drevs.add(r)
701 drevs.add(r)
702 elif op == b'ancestors':
702 elif op == b'ancestors':
703 r, a = _prefetchdrevs(tree[1])
703 r, a = _prefetchdrevs(tree[1])
704 drevs.update(r)
704 drevs.update(r)
705 ancestordrevs.update(r)
705 ancestordrevs.update(r)
706 ancestordrevs.update(a)
706 ancestordrevs.update(a)
707 else:
707 else:
708 for t in tree[1:]:
708 for t in tree[1:]:
709 r, a = _prefetchdrevs(t)
709 r, a = _prefetchdrevs(t)
710 drevs.update(r)
710 drevs.update(r)
711 ancestordrevs.update(a)
711 ancestordrevs.update(a)
712 return drevs, ancestordrevs
712 return drevs, ancestordrevs
713
713
714 def querydrev(repo, spec):
714 def querydrev(repo, spec):
715 """return a list of "Differential Revision" dicts
715 """return a list of "Differential Revision" dicts
716
716
717 spec is a string using a simple query language, see docstring in phabread
717 spec is a string using a simple query language, see docstring in phabread
718 for details.
718 for details.
719
719
720 A "Differential Revision dict" looks like:
720 A "Differential Revision dict" looks like:
721
721
722 {
722 {
723 "id": "2",
723 "id": "2",
724 "phid": "PHID-DREV-672qvysjcczopag46qty",
724 "phid": "PHID-DREV-672qvysjcczopag46qty",
725 "title": "example",
725 "title": "example",
726 "uri": "https://phab.example.com/D2",
726 "uri": "https://phab.example.com/D2",
727 "dateCreated": "1499181406",
727 "dateCreated": "1499181406",
728 "dateModified": "1499182103",
728 "dateModified": "1499182103",
729 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
729 "authorPHID": "PHID-USER-tv3ohwc4v4jeu34otlye",
730 "status": "0",
730 "status": "0",
731 "statusName": "Needs Review",
731 "statusName": "Needs Review",
732 "properties": [],
732 "properties": [],
733 "branch": null,
733 "branch": null,
734 "summary": "",
734 "summary": "",
735 "testPlan": "",
735 "testPlan": "",
736 "lineCount": "2",
736 "lineCount": "2",
737 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
737 "activeDiffPHID": "PHID-DIFF-xoqnjkobbm6k4dk6hi72",
738 "diffs": [
738 "diffs": [
739 "3",
739 "3",
740 "4",
740 "4",
741 ],
741 ],
742 "commits": [],
742 "commits": [],
743 "reviewers": [],
743 "reviewers": [],
744 "ccs": [],
744 "ccs": [],
745 "hashes": [],
745 "hashes": [],
746 "auxiliary": {
746 "auxiliary": {
747 "phabricator:projects": [],
747 "phabricator:projects": [],
748 "phabricator:depends-on": [
748 "phabricator:depends-on": [
749 "PHID-DREV-gbapp366kutjebt7agcd"
749 "PHID-DREV-gbapp366kutjebt7agcd"
750 ]
750 ]
751 },
751 },
752 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
752 "repositoryPHID": "PHID-REPO-hub2hx62ieuqeheznasv",
753 "sourcePath": null
753 "sourcePath": null
754 }
754 }
755 """
755 """
756 def fetch(params):
756 def fetch(params):
757 """params -> single drev or None"""
757 """params -> single drev or None"""
758 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
758 key = (params.get(r'ids') or params.get(r'phids') or [None])[0]
759 if key in prefetched:
759 if key in prefetched:
760 return prefetched[key]
760 return prefetched[key]
761 drevs = callconduit(repo, b'differential.query', params)
761 drevs = callconduit(repo, b'differential.query', params)
762 # Fill prefetched with the result
762 # Fill prefetched with the result
763 for drev in drevs:
763 for drev in drevs:
764 prefetched[drev[r'phid']] = drev
764 prefetched[drev[r'phid']] = drev
765 prefetched[int(drev[r'id'])] = drev
765 prefetched[int(drev[r'id'])] = drev
766 if key not in prefetched:
766 if key not in prefetched:
767 raise error.Abort(_(b'cannot get Differential Revision %r')
767 raise error.Abort(_(b'cannot get Differential Revision %r')
768 % params)
768 % params)
769 return prefetched[key]
769 return prefetched[key]
770
770
771 def getstack(topdrevids):
771 def getstack(topdrevids):
772 """given a top, get a stack from the bottom, [id] -> [id]"""
772 """given a top, get a stack from the bottom, [id] -> [id]"""
773 visited = set()
773 visited = set()
774 result = []
774 result = []
775 queue = [{r'ids': [i]} for i in topdrevids]
775 queue = [{r'ids': [i]} for i in topdrevids]
776 while queue:
776 while queue:
777 params = queue.pop()
777 params = queue.pop()
778 drev = fetch(params)
778 drev = fetch(params)
779 if drev[r'id'] in visited:
779 if drev[r'id'] in visited:
780 continue
780 continue
781 visited.add(drev[r'id'])
781 visited.add(drev[r'id'])
782 result.append(int(drev[r'id']))
782 result.append(int(drev[r'id']))
783 auxiliary = drev.get(r'auxiliary', {})
783 auxiliary = drev.get(r'auxiliary', {})
784 depends = auxiliary.get(r'phabricator:depends-on', [])
784 depends = auxiliary.get(r'phabricator:depends-on', [])
785 for phid in depends:
785 for phid in depends:
786 queue.append({b'phids': [phid]})
786 queue.append({b'phids': [phid]})
787 result.reverse()
787 result.reverse()
788 return smartset.baseset(result)
788 return smartset.baseset(result)
789
789
790 # Initialize prefetch cache
790 # Initialize prefetch cache
791 prefetched = {} # {id or phid: drev}
791 prefetched = {} # {id or phid: drev}
792
792
793 tree = _parse(spec)
793 tree = _parse(spec)
794 drevs, ancestordrevs = _prefetchdrevs(tree)
794 drevs, ancestordrevs = _prefetchdrevs(tree)
795
795
796 # developer config: phabricator.batchsize
796 # developer config: phabricator.batchsize
797 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
797 batchsize = repo.ui.configint(b'phabricator', b'batchsize')
798
798
799 # Prefetch Differential Revisions in batch
799 # Prefetch Differential Revisions in batch
800 tofetch = set(drevs)
800 tofetch = set(drevs)
801 for r in ancestordrevs:
801 for r in ancestordrevs:
802 tofetch.update(range(max(1, r - batchsize), r + 1))
802 tofetch.update(range(max(1, r - batchsize), r + 1))
803 if drevs:
803 if drevs:
804 fetch({r'ids': list(tofetch)})
804 fetch({r'ids': list(tofetch)})
805 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
805 validids = sorted(set(getstack(list(ancestordrevs))) | set(drevs))
806
806
807 # Walk through the tree, return smartsets
807 # Walk through the tree, return smartsets
808 def walk(tree):
808 def walk(tree):
809 op = tree[0]
809 op = tree[0]
810 if op == b'symbol':
810 if op == b'symbol':
811 drev = _parsedrev(tree[1])
811 drev = _parsedrev(tree[1])
812 if drev:
812 if drev:
813 return smartset.baseset([drev])
813 return smartset.baseset([drev])
814 elif tree[1] in _knownstatusnames:
814 elif tree[1] in _knownstatusnames:
815 drevs = [r for r in validids
815 drevs = [r for r in validids
816 if _getstatusname(prefetched[r]) == tree[1]]
816 if _getstatusname(prefetched[r]) == tree[1]]
817 return smartset.baseset(drevs)
817 return smartset.baseset(drevs)
818 else:
818 else:
819 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
819 raise error.Abort(_(b'unknown symbol: %s') % tree[1])
820 elif op in {b'and_', b'add', b'sub'}:
820 elif op in {b'and_', b'add', b'sub'}:
821 assert len(tree) == 3
821 assert len(tree) == 3
822 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
822 return getattr(operator, op)(walk(tree[1]), walk(tree[2]))
823 elif op == b'group':
823 elif op == b'group':
824 return walk(tree[1])
824 return walk(tree[1])
825 elif op == b'ancestors':
825 elif op == b'ancestors':
826 return getstack(walk(tree[1]))
826 return getstack(walk(tree[1]))
827 else:
827 else:
828 raise error.ProgrammingError(b'illegal tree: %r' % tree)
828 raise error.ProgrammingError(b'illegal tree: %r' % tree)
829
829
830 return [prefetched[r] for r in walk(tree)]
830 return [prefetched[r] for r in walk(tree)]
831
831
832 def getdescfromdrev(drev):
832 def getdescfromdrev(drev):
833 """get description (commit message) from "Differential Revision"
833 """get description (commit message) from "Differential Revision"
834
834
835 This is similar to differential.getcommitmessage API. But we only care
835 This is similar to differential.getcommitmessage API. But we only care
836 about limited fields: title, summary, test plan, and URL.
836 about limited fields: title, summary, test plan, and URL.
837 """
837 """
838 title = drev[r'title']
838 title = drev[r'title']
839 summary = drev[r'summary'].rstrip()
839 summary = drev[r'summary'].rstrip()
840 testplan = drev[r'testPlan'].rstrip()
840 testplan = drev[r'testPlan'].rstrip()
841 if testplan:
841 if testplan:
842 testplan = b'Test Plan:\n%s' % testplan
842 testplan = b'Test Plan:\n%s' % testplan
843 uri = b'Differential Revision: %s' % drev[r'uri']
843 uri = b'Differential Revision: %s' % drev[r'uri']
844 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
844 return b'\n\n'.join(filter(None, [title, summary, testplan, uri]))
845
845
846 def getdiffmeta(diff):
846 def getdiffmeta(diff):
847 """get commit metadata (date, node, user, p1) from a diff object
847 """get commit metadata (date, node, user, p1) from a diff object
848
848
849 The metadata could be "hg:meta", sent by phabsend, like:
849 The metadata could be "hg:meta", sent by phabsend, like:
850
850
851 "properties": {
851 "properties": {
852 "hg:meta": {
852 "hg:meta": {
853 "date": "1499571514 25200",
853 "date": "1499571514 25200",
854 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
854 "node": "98c08acae292b2faf60a279b4189beb6cff1414d",
855 "user": "Foo Bar <foo@example.com>",
855 "user": "Foo Bar <foo@example.com>",
856 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
856 "parent": "6d0abad76b30e4724a37ab8721d630394070fe16"
857 }
857 }
858 }
858 }
859
859
860 Or converted from "local:commits", sent by "arc", like:
860 Or converted from "local:commits", sent by "arc", like:
861
861
862 "properties": {
862 "properties": {
863 "local:commits": {
863 "local:commits": {
864 "98c08acae292b2faf60a279b4189beb6cff1414d": {
864 "98c08acae292b2faf60a279b4189beb6cff1414d": {
865 "author": "Foo Bar",
865 "author": "Foo Bar",
866 "time": 1499546314,
866 "time": 1499546314,
867 "branch": "default",
867 "branch": "default",
868 "tag": "",
868 "tag": "",
869 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
869 "commit": "98c08acae292b2faf60a279b4189beb6cff1414d",
870 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
870 "rev": "98c08acae292b2faf60a279b4189beb6cff1414d",
871 "local": "1000",
871 "local": "1000",
872 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
872 "parents": ["6d0abad76b30e4724a37ab8721d630394070fe16"],
873 "summary": "...",
873 "summary": "...",
874 "message": "...",
874 "message": "...",
875 "authorEmail": "foo@example.com"
875 "authorEmail": "foo@example.com"
876 }
876 }
877 }
877 }
878 }
878 }
879
879
880 Note: metadata extracted from "local:commits" will lose time zone
880 Note: metadata extracted from "local:commits" will lose time zone
881 information.
881 information.
882 """
882 """
883 props = diff.get(r'properties') or {}
883 props = diff.get(r'properties') or {}
884 meta = props.get(r'hg:meta')
884 meta = props.get(r'hg:meta')
885 if not meta and props.get(r'local:commits'):
885 if not meta and props.get(r'local:commits'):
886 commit = sorted(props[r'local:commits'].values())[0]
886 commit = sorted(props[r'local:commits'].values())[0]
887 meta = {
887 meta = {
888 r'date': r'%d 0' % commit[r'time'],
888 r'date': r'%d 0' % commit[r'time'],
889 r'node': commit[r'rev'],
889 r'node': commit[r'rev'],
890 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
890 r'user': r'%s <%s>' % (commit[r'author'], commit[r'authorEmail']),
891 }
891 }
892 if len(commit.get(r'parents', ())) >= 1:
892 if len(commit.get(r'parents', ())) >= 1:
893 meta[r'parent'] = commit[r'parents'][0]
893 meta[r'parent'] = commit[r'parents'][0]
894 return meta or {}
894 return meta or {}
895
895
896 def readpatch(repo, drevs, write):
896 def readpatch(repo, drevs, write):
897 """generate plain-text patch readable by 'hg import'
897 """generate plain-text patch readable by 'hg import'
898
898
899 write is usually ui.write. drevs is what "querydrev" returns, results of
899 write is usually ui.write. drevs is what "querydrev" returns, results of
900 "differential.query".
900 "differential.query".
901 """
901 """
902 # Prefetch hg:meta property for all diffs
902 # Prefetch hg:meta property for all diffs
903 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
903 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs))
904 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
904 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids})
905
905
906 # Generate patch for each drev
906 # Generate patch for each drev
907 for drev in drevs:
907 for drev in drevs:
908 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
908 repo.ui.note(_(b'reading D%s\n') % drev[r'id'])
909
909
910 diffid = max(int(v) for v in drev[r'diffs'])
910 diffid = max(int(v) for v in drev[r'diffs'])
911 body = callconduit(repo, b'differential.getrawdiff',
911 body = callconduit(repo, b'differential.getrawdiff',
912 {b'diffID': diffid})
912 {b'diffID': diffid})
913 desc = getdescfromdrev(drev)
913 desc = getdescfromdrev(drev)
914 header = b'# HG changeset patch\n'
914 header = b'# HG changeset patch\n'
915
915
916 # Try to preserve metadata from hg:meta property. Write hg patch
916 # Try to preserve metadata from hg:meta property. Write hg patch
917 # headers that can be read by the "import" command. See patchheadermap
917 # headers that can be read by the "import" command. See patchheadermap
918 # and extract in mercurial/patch.py for supported headers.
918 # and extract in mercurial/patch.py for supported headers.
919 meta = getdiffmeta(diffs[str(diffid)])
919 meta = getdiffmeta(diffs[str(diffid)])
920 for k in _metanamemap.keys():
920 for k in _metanamemap.keys():
921 if k in meta:
921 if k in meta:
922 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
922 header += b'# %s %s\n' % (_metanamemap[k], meta[k])
923
923
924 content = b'%s%s\n%s' % (header, desc, body)
924 content = b'%s%s\n%s' % (header, desc, body)
925 write(encoding.unitolocal(content))
925 write(encoding.unitolocal(content))
926
926
927 @vcrcommand(b'phabread',
927 @vcrcommand(b'phabread',
928 [(b'', b'stack', False, _(b'read dependencies'))],
928 [(b'', b'stack', False, _(b'read dependencies'))],
929 _(b'DREVSPEC [OPTIONS]'),
929 _(b'DREVSPEC [OPTIONS]'),
930 helpcategory=command.CATEGORY_IMPORT_EXPORT)
930 helpcategory=command.CATEGORY_IMPORT_EXPORT)
931 def phabread(ui, repo, spec, **opts):
931 def phabread(ui, repo, spec, **opts):
932 """print patches from Phabricator suitable for importing
932 """print patches from Phabricator suitable for importing
933
933
934 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
934 DREVSPEC could be a Differential Revision identity, like ``D123``, or just
935 the number ``123``. It could also have common operators like ``+``, ``-``,
935 the number ``123``. It could also have common operators like ``+``, ``-``,
936 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
936 ``&``, ``(``, ``)`` for complex queries. Prefix ``:`` could be used to
937 select a stack.
937 select a stack.
938
938
939 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
939 ``abandoned``, ``accepted``, ``closed``, ``needsreview``, ``needsrevision``
940 could be used to filter patches by status. For performance reason, they
940 could be used to filter patches by status. For performance reason, they
941 only represent a subset of non-status selections and cannot be used alone.
941 only represent a subset of non-status selections and cannot be used alone.
942
942
943 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
943 For example, ``:D6+8-(2+D4)`` selects a stack up to D6, plus D8 and exclude
944 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
944 D2 and D4. ``:D9 & needsreview`` selects "Needs Review" revisions in a
945 stack up to D9.
945 stack up to D9.
946
946
947 If --stack is given, follow dependencies information and read all patches.
947 If --stack is given, follow dependencies information and read all patches.
948 It is equivalent to the ``:`` operator.
948 It is equivalent to the ``:`` operator.
949 """
949 """
950 if opts.get(b'stack'):
950 if opts.get(b'stack'):
951 spec = b':(%s)' % spec
951 spec = b':(%s)' % spec
952 drevs = querydrev(repo, spec)
952 drevs = querydrev(repo, spec)
953 readpatch(repo, drevs, ui.write)
953 readpatch(repo, drevs, ui.write)
954
954
955 @vcrcommand(b'phabupdate',
955 @vcrcommand(b'phabupdate',
956 [(b'', b'accept', False, _(b'accept revisions')),
956 [(b'', b'accept', False, _(b'accept revisions')),
957 (b'', b'reject', False, _(b'reject revisions')),
957 (b'', b'reject', False, _(b'reject revisions')),
958 (b'', b'abandon', False, _(b'abandon revisions')),
958 (b'', b'abandon', False, _(b'abandon revisions')),
959 (b'', b'reclaim', False, _(b'reclaim revisions')),
959 (b'', b'reclaim', False, _(b'reclaim revisions')),
960 (b'm', b'comment', b'', _(b'comment on the last revision')),
960 (b'm', b'comment', b'', _(b'comment on the last revision')),
961 ], _(b'DREVSPEC [OPTIONS]'),
961 ], _(b'DREVSPEC [OPTIONS]'),
962 helpcategory=command.CATEGORY_IMPORT_EXPORT)
962 helpcategory=command.CATEGORY_IMPORT_EXPORT)
963 def phabupdate(ui, repo, spec, **opts):
963 def phabupdate(ui, repo, spec, **opts):
964 """update Differential Revision in batch
964 """update Differential Revision in batch
965
965
966 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
966 DREVSPEC selects revisions. See :hg:`help phabread` for its usage.
967 """
967 """
968 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
968 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)]
969 if len(flags) > 1:
969 if len(flags) > 1:
970 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
970 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags))
971
971
972 actions = []
972 actions = []
973 for f in flags:
973 for f in flags:
974 actions.append({b'type': f, b'value': b'true'})
974 actions.append({b'type': f, b'value': b'true'})
975
975
976 drevs = querydrev(repo, spec)
976 drevs = querydrev(repo, spec)
977 for i, drev in enumerate(drevs):
977 for i, drev in enumerate(drevs):
978 if i + 1 == len(drevs) and opts.get(b'comment'):
978 if i + 1 == len(drevs) and opts.get(b'comment'):
979 actions.append({b'type': b'comment', b'value': opts[b'comment']})
979 actions.append({b'type': b'comment', b'value': opts[b'comment']})
980 if actions:
980 if actions:
981 params = {b'objectIdentifier': drev[r'phid'],
981 params = {b'objectIdentifier': drev[r'phid'],
982 b'transactions': actions}
982 b'transactions': actions}
983 callconduit(repo, b'differential.revision.edit', params)
983 callconduit(repo, b'differential.revision.edit', params)
984
984
985 templatekeyword = registrar.templatekeyword()
985 templatekeyword = registrar.templatekeyword()
986
986
987 @templatekeyword(b'phabreview', requires={b'ctx'})
987 @templatekeyword(b'phabreview', requires={b'ctx'})
988 def template_review(context, mapping):
988 def template_review(context, mapping):
989 """:phabreview: Object describing the review for this changeset.
989 """:phabreview: Object describing the review for this changeset.
990 Has attributes `url` and `id`.
990 Has attributes `url` and `id`.
991 """
991 """
992 ctx = context.resource(mapping, b'ctx')
992 ctx = context.resource(mapping, b'ctx')
993 m = _differentialrevisiondescre.search(ctx.description())
993 m = _differentialrevisiondescre.search(ctx.description())
994 if m:
994 if m:
995 return templateutil.hybriddict({
995 return templateutil.hybriddict({
996 b'url': m.group(b'url'),
996 b'url': m.group(b'url'),
997 b'id': b"D{}".format(m.group(b'id')),
997 b'id': b"D{}".format(m.group(b'id')),
998 })
998 })
999 else:
999 else:
1000 tags = ctx.repo().nodetags(ctx.node())
1000 tags = ctx.repo().nodetags(ctx.node())
1001 for t in tags:
1001 for t in tags:
1002 if _differentialrevisiontagre.match(t):
1002 if _differentialrevisiontagre.match(t):
1003 url = ctx.repo().ui.config(b'phabricator', b'url')
1003 url = ctx.repo().ui.config(b'phabricator', b'url')
1004 if not url.endswith(b'/'):
1004 if not url.endswith(b'/'):
1005 url += b'/'
1005 url += b'/'
1006 url += t
1006 url += t
1007
1007
1008 return templateutil.hybriddict({
1008 return templateutil.hybriddict({
1009 b'url': url,
1009 b'url': url,
1010 b'id': t,
1010 b'id': t,
1011 })
1011 })
1012 return None
1012 return None
@@ -1,641 +1,641
1 # Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
1 # Copyright 2017-present Gregory Szorc <gregory.szorc@gmail.com>
2 #
2 #
3 # This software may be used and distributed according to the terms of the
3 # This software may be used and distributed according to the terms of the
4 # GNU General Public License version 2 or any later version.
4 # GNU General Public License version 2 or any later version.
5
5
6 """generate release notes from commit messages (EXPERIMENTAL)
6 """generate release notes from commit messages (EXPERIMENTAL)
7
7
8 It is common to maintain files detailing changes in a project between
8 It is common to maintain files detailing changes in a project between
9 releases. Maintaining these files can be difficult and time consuming.
9 releases. Maintaining these files can be difficult and time consuming.
10 The :hg:`releasenotes` command provided by this extension makes the
10 The :hg:`releasenotes` command provided by this extension makes the
11 process simpler by automating it.
11 process simpler by automating it.
12 """
12 """
13
13
14 from __future__ import absolute_import
14 from __future__ import absolute_import
15
15
16 import difflib
16 import difflib
17 import errno
17 import errno
18 import re
18 import re
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import (
21 from mercurial import (
22 config,
22 config,
23 error,
23 error,
24 minirst,
24 minirst,
25 node,
25 node,
26 pycompat,
26 pycompat,
27 registrar,
27 registrar,
28 scmutil,
28 scmutil,
29 util,
29 util,
30 )
30 )
31 from mercurial.utils import (
31 from mercurial.utils import (
32 stringutil,
32 stringutil,
33 )
33 )
34
34
35 cmdtable = {}
35 cmdtable = {}
36 command = registrar.command(cmdtable)
36 command = registrar.command(cmdtable)
37
37
38 try:
38 try:
39 import fuzzywuzzy.fuzz as fuzz
39 import fuzzywuzzy.fuzz as fuzz
40 fuzz.token_set_ratio
40 fuzz.token_set_ratio
41 except ImportError:
41 except ImportError:
42 fuzz = None
42 fuzz = None
43
43
44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
44 # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
45 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
46 # be specifying the version(s) of Mercurial they are tested with, or
46 # be specifying the version(s) of Mercurial they are tested with, or
47 # leave the attribute unspecified.
47 # leave the attribute unspecified.
48 testedwith = 'ships-with-hg-core'
48 testedwith = 'ships-with-hg-core'
49
49
50 DEFAULT_SECTIONS = [
50 DEFAULT_SECTIONS = [
51 ('feature', _('New Features')),
51 ('feature', _('New Features')),
52 ('bc', _('Backwards Compatibility Changes')),
52 ('bc', _('Backwards Compatibility Changes')),
53 ('fix', _('Bug Fixes')),
53 ('fix', _('Bug Fixes')),
54 ('perf', _('Performance Improvements')),
54 ('perf', _('Performance Improvements')),
55 ('api', _('API Changes')),
55 ('api', _('API Changes')),
56 ]
56 ]
57
57
58 RE_DIRECTIVE = re.compile('^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
58 RE_DIRECTIVE = re.compile(br'^\.\. ([a-zA-Z0-9_]+)::\s*([^$]+)?$')
59 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
59 RE_ISSUE = br'\bissue ?[0-9]{4,6}(?![0-9])\b'
60
60
61 BULLET_SECTION = _('Other Changes')
61 BULLET_SECTION = _('Other Changes')
62
62
63 class parsedreleasenotes(object):
63 class parsedreleasenotes(object):
64 def __init__(self):
64 def __init__(self):
65 self.sections = {}
65 self.sections = {}
66
66
67 def __contains__(self, section):
67 def __contains__(self, section):
68 return section in self.sections
68 return section in self.sections
69
69
70 def __iter__(self):
70 def __iter__(self):
71 return iter(sorted(self.sections))
71 return iter(sorted(self.sections))
72
72
73 def addtitleditem(self, section, title, paragraphs):
73 def addtitleditem(self, section, title, paragraphs):
74 """Add a titled release note entry."""
74 """Add a titled release note entry."""
75 self.sections.setdefault(section, ([], []))
75 self.sections.setdefault(section, ([], []))
76 self.sections[section][0].append((title, paragraphs))
76 self.sections[section][0].append((title, paragraphs))
77
77
78 def addnontitleditem(self, section, paragraphs):
78 def addnontitleditem(self, section, paragraphs):
79 """Adds a non-titled release note entry.
79 """Adds a non-titled release note entry.
80
80
81 Will be rendered as a bullet point.
81 Will be rendered as a bullet point.
82 """
82 """
83 self.sections.setdefault(section, ([], []))
83 self.sections.setdefault(section, ([], []))
84 self.sections[section][1].append(paragraphs)
84 self.sections[section][1].append(paragraphs)
85
85
86 def titledforsection(self, section):
86 def titledforsection(self, section):
87 """Returns titled entries in a section.
87 """Returns titled entries in a section.
88
88
89 Returns a list of (title, paragraphs) tuples describing sub-sections.
89 Returns a list of (title, paragraphs) tuples describing sub-sections.
90 """
90 """
91 return self.sections.get(section, ([], []))[0]
91 return self.sections.get(section, ([], []))[0]
92
92
93 def nontitledforsection(self, section):
93 def nontitledforsection(self, section):
94 """Returns non-titled, bulleted paragraphs in a section."""
94 """Returns non-titled, bulleted paragraphs in a section."""
95 return self.sections.get(section, ([], []))[1]
95 return self.sections.get(section, ([], []))[1]
96
96
97 def hastitledinsection(self, section, title):
97 def hastitledinsection(self, section, title):
98 return any(t[0] == title for t in self.titledforsection(section))
98 return any(t[0] == title for t in self.titledforsection(section))
99
99
100 def merge(self, ui, other):
100 def merge(self, ui, other):
101 """Merge another instance into this one.
101 """Merge another instance into this one.
102
102
103 This is used to combine multiple sources of release notes together.
103 This is used to combine multiple sources of release notes together.
104 """
104 """
105 if not fuzz:
105 if not fuzz:
106 ui.warn(_("module 'fuzzywuzzy' not found, merging of similar "
106 ui.warn(_("module 'fuzzywuzzy' not found, merging of similar "
107 "releasenotes is disabled\n"))
107 "releasenotes is disabled\n"))
108
108
109 for section in other:
109 for section in other:
110 existingnotes = converttitled(self.titledforsection(section)) + \
110 existingnotes = converttitled(self.titledforsection(section)) + \
111 convertnontitled(self.nontitledforsection(section))
111 convertnontitled(self.nontitledforsection(section))
112 for title, paragraphs in other.titledforsection(section):
112 for title, paragraphs in other.titledforsection(section):
113 if self.hastitledinsection(section, title):
113 if self.hastitledinsection(section, title):
114 # TODO prompt for resolution if different and running in
114 # TODO prompt for resolution if different and running in
115 # interactive mode.
115 # interactive mode.
116 ui.write(_('%s already exists in %s section; ignoring\n') %
116 ui.write(_('%s already exists in %s section; ignoring\n') %
117 (title, section))
117 (title, section))
118 continue
118 continue
119
119
120 incoming_str = converttitled([(title, paragraphs)])[0]
120 incoming_str = converttitled([(title, paragraphs)])[0]
121 if section == 'fix':
121 if section == 'fix':
122 issue = getissuenum(incoming_str)
122 issue = getissuenum(incoming_str)
123 if issue:
123 if issue:
124 if findissue(ui, existingnotes, issue):
124 if findissue(ui, existingnotes, issue):
125 continue
125 continue
126
126
127 if similar(ui, existingnotes, incoming_str):
127 if similar(ui, existingnotes, incoming_str):
128 continue
128 continue
129
129
130 self.addtitleditem(section, title, paragraphs)
130 self.addtitleditem(section, title, paragraphs)
131
131
132 for paragraphs in other.nontitledforsection(section):
132 for paragraphs in other.nontitledforsection(section):
133 if paragraphs in self.nontitledforsection(section):
133 if paragraphs in self.nontitledforsection(section):
134 continue
134 continue
135
135
136 incoming_str = convertnontitled([paragraphs])[0]
136 incoming_str = convertnontitled([paragraphs])[0]
137 if section == 'fix':
137 if section == 'fix':
138 issue = getissuenum(incoming_str)
138 issue = getissuenum(incoming_str)
139 if issue:
139 if issue:
140 if findissue(ui, existingnotes, issue):
140 if findissue(ui, existingnotes, issue):
141 continue
141 continue
142
142
143 if similar(ui, existingnotes, incoming_str):
143 if similar(ui, existingnotes, incoming_str):
144 continue
144 continue
145
145
146 self.addnontitleditem(section, paragraphs)
146 self.addnontitleditem(section, paragraphs)
147
147
148 class releasenotessections(object):
148 class releasenotessections(object):
149 def __init__(self, ui, repo=None):
149 def __init__(self, ui, repo=None):
150 if repo:
150 if repo:
151 sections = util.sortdict(DEFAULT_SECTIONS)
151 sections = util.sortdict(DEFAULT_SECTIONS)
152 custom_sections = getcustomadmonitions(repo)
152 custom_sections = getcustomadmonitions(repo)
153 if custom_sections:
153 if custom_sections:
154 sections.update(custom_sections)
154 sections.update(custom_sections)
155 self._sections = list(sections.iteritems())
155 self._sections = list(sections.iteritems())
156 else:
156 else:
157 self._sections = list(DEFAULT_SECTIONS)
157 self._sections = list(DEFAULT_SECTIONS)
158
158
159 def __iter__(self):
159 def __iter__(self):
160 return iter(self._sections)
160 return iter(self._sections)
161
161
162 def names(self):
162 def names(self):
163 return [t[0] for t in self._sections]
163 return [t[0] for t in self._sections]
164
164
165 def sectionfromtitle(self, title):
165 def sectionfromtitle(self, title):
166 for name, value in self._sections:
166 for name, value in self._sections:
167 if value == title:
167 if value == title:
168 return name
168 return name
169
169
170 return None
170 return None
171
171
172 def converttitled(titledparagraphs):
172 def converttitled(titledparagraphs):
173 """
173 """
174 Convert titled paragraphs to strings
174 Convert titled paragraphs to strings
175 """
175 """
176 string_list = []
176 string_list = []
177 for title, paragraphs in titledparagraphs:
177 for title, paragraphs in titledparagraphs:
178 lines = []
178 lines = []
179 for para in paragraphs:
179 for para in paragraphs:
180 lines.extend(para)
180 lines.extend(para)
181 string_list.append(' '.join(lines))
181 string_list.append(' '.join(lines))
182 return string_list
182 return string_list
183
183
184 def convertnontitled(nontitledparagraphs):
184 def convertnontitled(nontitledparagraphs):
185 """
185 """
186 Convert non-titled bullets to strings
186 Convert non-titled bullets to strings
187 """
187 """
188 string_list = []
188 string_list = []
189 for paragraphs in nontitledparagraphs:
189 for paragraphs in nontitledparagraphs:
190 lines = []
190 lines = []
191 for para in paragraphs:
191 for para in paragraphs:
192 lines.extend(para)
192 lines.extend(para)
193 string_list.append(' '.join(lines))
193 string_list.append(' '.join(lines))
194 return string_list
194 return string_list
195
195
196 def getissuenum(incoming_str):
196 def getissuenum(incoming_str):
197 """
197 """
198 Returns issue number from the incoming string if it exists
198 Returns issue number from the incoming string if it exists
199 """
199 """
200 issue = re.search(RE_ISSUE, incoming_str, re.IGNORECASE)
200 issue = re.search(RE_ISSUE, incoming_str, re.IGNORECASE)
201 if issue:
201 if issue:
202 issue = issue.group()
202 issue = issue.group()
203 return issue
203 return issue
204
204
205 def findissue(ui, existing, issue):
205 def findissue(ui, existing, issue):
206 """
206 """
207 Returns true if issue number already exists in notes.
207 Returns true if issue number already exists in notes.
208 """
208 """
209 if any(issue in s for s in existing):
209 if any(issue in s for s in existing):
210 ui.write(_('"%s" already exists in notes; ignoring\n') % issue)
210 ui.write(_('"%s" already exists in notes; ignoring\n') % issue)
211 return True
211 return True
212 else:
212 else:
213 return False
213 return False
214
214
215 def similar(ui, existing, incoming_str):
215 def similar(ui, existing, incoming_str):
216 """
216 """
217 Returns true if similar note found in existing notes.
217 Returns true if similar note found in existing notes.
218 """
218 """
219 if len(incoming_str.split()) > 10:
219 if len(incoming_str.split()) > 10:
220 merge = similaritycheck(incoming_str, existing)
220 merge = similaritycheck(incoming_str, existing)
221 if not merge:
221 if not merge:
222 ui.write(_('"%s" already exists in notes file; ignoring\n')
222 ui.write(_('"%s" already exists in notes file; ignoring\n')
223 % incoming_str)
223 % incoming_str)
224 return True
224 return True
225 else:
225 else:
226 return False
226 return False
227 else:
227 else:
228 return False
228 return False
229
229
230 def similaritycheck(incoming_str, existingnotes):
230 def similaritycheck(incoming_str, existingnotes):
231 """
231 """
232 Returns false when note fragment can be merged to existing notes.
232 Returns false when note fragment can be merged to existing notes.
233 """
233 """
234 # fuzzywuzzy not present
234 # fuzzywuzzy not present
235 if not fuzz:
235 if not fuzz:
236 return True
236 return True
237
237
238 merge = True
238 merge = True
239 for bullet in existingnotes:
239 for bullet in existingnotes:
240 score = fuzz.token_set_ratio(incoming_str, bullet)
240 score = fuzz.token_set_ratio(incoming_str, bullet)
241 if score > 75:
241 if score > 75:
242 merge = False
242 merge = False
243 break
243 break
244 return merge
244 return merge
245
245
246 def getcustomadmonitions(repo):
246 def getcustomadmonitions(repo):
247 ctx = repo['.']
247 ctx = repo['.']
248 p = config.config()
248 p = config.config()
249
249
250 def read(f, sections=None, remap=None):
250 def read(f, sections=None, remap=None):
251 if f in ctx:
251 if f in ctx:
252 data = ctx[f].data()
252 data = ctx[f].data()
253 p.parse(f, data, sections, remap, read)
253 p.parse(f, data, sections, remap, read)
254 else:
254 else:
255 raise error.Abort(_(".hgreleasenotes file \'%s\' not found") %
255 raise error.Abort(_(".hgreleasenotes file \'%s\' not found") %
256 repo.pathto(f))
256 repo.pathto(f))
257
257
258 if '.hgreleasenotes' in ctx:
258 if '.hgreleasenotes' in ctx:
259 read('.hgreleasenotes')
259 read('.hgreleasenotes')
260 return p['sections']
260 return p['sections']
261
261
262 def checkadmonitions(ui, repo, directives, revs):
262 def checkadmonitions(ui, repo, directives, revs):
263 """
263 """
264 Checks the commit messages for admonitions and their validity.
264 Checks the commit messages for admonitions and their validity.
265
265
266 .. abcd::
266 .. abcd::
267
267
268 First paragraph under this admonition
268 First paragraph under this admonition
269
269
270 For this commit message, using `hg releasenotes -r . --check`
270 For this commit message, using `hg releasenotes -r . --check`
271 returns: Invalid admonition 'abcd' present in changeset 3ea92981e103
271 returns: Invalid admonition 'abcd' present in changeset 3ea92981e103
272
272
273 As admonition 'abcd' is neither present in default nor custom admonitions
273 As admonition 'abcd' is neither present in default nor custom admonitions
274 """
274 """
275 for rev in revs:
275 for rev in revs:
276 ctx = repo[rev]
276 ctx = repo[rev]
277 admonition = re.search(RE_DIRECTIVE, ctx.description())
277 admonition = re.search(RE_DIRECTIVE, ctx.description())
278 if admonition:
278 if admonition:
279 if admonition.group(1) in directives:
279 if admonition.group(1) in directives:
280 continue
280 continue
281 else:
281 else:
282 ui.write(_("Invalid admonition '%s' present in changeset %s"
282 ui.write(_("Invalid admonition '%s' present in changeset %s"
283 "\n") % (admonition.group(1), ctx.hex()[:12]))
283 "\n") % (admonition.group(1), ctx.hex()[:12]))
284 sim = lambda x: difflib.SequenceMatcher(None,
284 sim = lambda x: difflib.SequenceMatcher(None,
285 admonition.group(1), x).ratio()
285 admonition.group(1), x).ratio()
286
286
287 similar = [s for s in directives if sim(s) > 0.6]
287 similar = [s for s in directives if sim(s) > 0.6]
288 if len(similar) == 1:
288 if len(similar) == 1:
289 ui.write(_("(did you mean %s?)\n") % similar[0])
289 ui.write(_("(did you mean %s?)\n") % similar[0])
290 elif similar:
290 elif similar:
291 ss = ", ".join(sorted(similar))
291 ss = ", ".join(sorted(similar))
292 ui.write(_("(did you mean one of %s?)\n") % ss)
292 ui.write(_("(did you mean one of %s?)\n") % ss)
293
293
294 def _getadmonitionlist(ui, sections):
294 def _getadmonitionlist(ui, sections):
295 for section in sections:
295 for section in sections:
296 ui.write("%s: %s\n" % (section[0], section[1]))
296 ui.write("%s: %s\n" % (section[0], section[1]))
297
297
298 def parsenotesfromrevisions(repo, directives, revs):
298 def parsenotesfromrevisions(repo, directives, revs):
299 notes = parsedreleasenotes()
299 notes = parsedreleasenotes()
300
300
301 for rev in revs:
301 for rev in revs:
302 ctx = repo[rev]
302 ctx = repo[rev]
303
303
304 blocks, pruned = minirst.parse(ctx.description(),
304 blocks, pruned = minirst.parse(ctx.description(),
305 admonitions=directives)
305 admonitions=directives)
306
306
307 for i, block in enumerate(blocks):
307 for i, block in enumerate(blocks):
308 if block['type'] != 'admonition':
308 if block['type'] != 'admonition':
309 continue
309 continue
310
310
311 directive = block['admonitiontitle']
311 directive = block['admonitiontitle']
312 title = block['lines'][0].strip() if block['lines'] else None
312 title = block['lines'][0].strip() if block['lines'] else None
313
313
314 if i + 1 == len(blocks):
314 if i + 1 == len(blocks):
315 raise error.Abort(_('changeset %s: release notes directive %s '
315 raise error.Abort(_('changeset %s: release notes directive %s '
316 'lacks content') % (ctx, directive))
316 'lacks content') % (ctx, directive))
317
317
318 # Now search ahead and find all paragraphs attached to this
318 # Now search ahead and find all paragraphs attached to this
319 # admonition.
319 # admonition.
320 paragraphs = []
320 paragraphs = []
321 for j in range(i + 1, len(blocks)):
321 for j in range(i + 1, len(blocks)):
322 pblock = blocks[j]
322 pblock = blocks[j]
323
323
324 # Margin blocks may appear between paragraphs. Ignore them.
324 # Margin blocks may appear between paragraphs. Ignore them.
325 if pblock['type'] == 'margin':
325 if pblock['type'] == 'margin':
326 continue
326 continue
327
327
328 if pblock['type'] == 'admonition':
328 if pblock['type'] == 'admonition':
329 break
329 break
330
330
331 if pblock['type'] != 'paragraph':
331 if pblock['type'] != 'paragraph':
332 repo.ui.warn(_('changeset %s: unexpected block in release '
332 repo.ui.warn(_('changeset %s: unexpected block in release '
333 'notes directive %s\n') % (ctx, directive))
333 'notes directive %s\n') % (ctx, directive))
334
334
335 if pblock['indent'] > 0:
335 if pblock['indent'] > 0:
336 paragraphs.append(pblock['lines'])
336 paragraphs.append(pblock['lines'])
337 else:
337 else:
338 break
338 break
339
339
340 # TODO consider using title as paragraph for more concise notes.
340 # TODO consider using title as paragraph for more concise notes.
341 if not paragraphs:
341 if not paragraphs:
342 repo.ui.warn(_("error parsing releasenotes for revision: "
342 repo.ui.warn(_("error parsing releasenotes for revision: "
343 "'%s'\n") % node.hex(ctx.node()))
343 "'%s'\n") % node.hex(ctx.node()))
344 if title:
344 if title:
345 notes.addtitleditem(directive, title, paragraphs)
345 notes.addtitleditem(directive, title, paragraphs)
346 else:
346 else:
347 notes.addnontitleditem(directive, paragraphs)
347 notes.addnontitleditem(directive, paragraphs)
348
348
349 return notes
349 return notes
350
350
351 def parsereleasenotesfile(sections, text):
351 def parsereleasenotesfile(sections, text):
352 """Parse text content containing generated release notes."""
352 """Parse text content containing generated release notes."""
353 notes = parsedreleasenotes()
353 notes = parsedreleasenotes()
354
354
355 blocks = minirst.parse(text)[0]
355 blocks = minirst.parse(text)[0]
356
356
357 def gatherparagraphsbullets(offset, title=False):
357 def gatherparagraphsbullets(offset, title=False):
358 notefragment = []
358 notefragment = []
359
359
360 for i in range(offset + 1, len(blocks)):
360 for i in range(offset + 1, len(blocks)):
361 block = blocks[i]
361 block = blocks[i]
362
362
363 if block['type'] == 'margin':
363 if block['type'] == 'margin':
364 continue
364 continue
365 elif block['type'] == 'section':
365 elif block['type'] == 'section':
366 break
366 break
367 elif block['type'] == 'bullet':
367 elif block['type'] == 'bullet':
368 if block['indent'] != 0:
368 if block['indent'] != 0:
369 raise error.Abort(_('indented bullet lists not supported'))
369 raise error.Abort(_('indented bullet lists not supported'))
370 if title:
370 if title:
371 lines = [l[1:].strip() for l in block['lines']]
371 lines = [l[1:].strip() for l in block['lines']]
372 notefragment.append(lines)
372 notefragment.append(lines)
373 continue
373 continue
374 else:
374 else:
375 lines = [[l[1:].strip() for l in block['lines']]]
375 lines = [[l[1:].strip() for l in block['lines']]]
376
376
377 for block in blocks[i + 1:]:
377 for block in blocks[i + 1:]:
378 if block['type'] in ('bullet', 'section'):
378 if block['type'] in ('bullet', 'section'):
379 break
379 break
380 if block['type'] == 'paragraph':
380 if block['type'] == 'paragraph':
381 lines.append(block['lines'])
381 lines.append(block['lines'])
382 notefragment.append(lines)
382 notefragment.append(lines)
383 continue
383 continue
384 elif block['type'] != 'paragraph':
384 elif block['type'] != 'paragraph':
385 raise error.Abort(_('unexpected block type in release notes: '
385 raise error.Abort(_('unexpected block type in release notes: '
386 '%s') % block['type'])
386 '%s') % block['type'])
387 if title:
387 if title:
388 notefragment.append(block['lines'])
388 notefragment.append(block['lines'])
389
389
390 return notefragment
390 return notefragment
391
391
392 currentsection = None
392 currentsection = None
393 for i, block in enumerate(blocks):
393 for i, block in enumerate(blocks):
394 if block['type'] != 'section':
394 if block['type'] != 'section':
395 continue
395 continue
396
396
397 title = block['lines'][0]
397 title = block['lines'][0]
398
398
399 # TODO the parsing around paragraphs and bullet points needs some
399 # TODO the parsing around paragraphs and bullet points needs some
400 # work.
400 # work.
401 if block['underline'] == '=': # main section
401 if block['underline'] == '=': # main section
402 name = sections.sectionfromtitle(title)
402 name = sections.sectionfromtitle(title)
403 if not name:
403 if not name:
404 raise error.Abort(_('unknown release notes section: %s') %
404 raise error.Abort(_('unknown release notes section: %s') %
405 title)
405 title)
406
406
407 currentsection = name
407 currentsection = name
408 bullet_points = gatherparagraphsbullets(i)
408 bullet_points = gatherparagraphsbullets(i)
409 if bullet_points:
409 if bullet_points:
410 for para in bullet_points:
410 for para in bullet_points:
411 notes.addnontitleditem(currentsection, para)
411 notes.addnontitleditem(currentsection, para)
412
412
413 elif block['underline'] == '-': # sub-section
413 elif block['underline'] == '-': # sub-section
414 if title == BULLET_SECTION:
414 if title == BULLET_SECTION:
415 bullet_points = gatherparagraphsbullets(i)
415 bullet_points = gatherparagraphsbullets(i)
416 for para in bullet_points:
416 for para in bullet_points:
417 notes.addnontitleditem(currentsection, para)
417 notes.addnontitleditem(currentsection, para)
418 else:
418 else:
419 paragraphs = gatherparagraphsbullets(i, True)
419 paragraphs = gatherparagraphsbullets(i, True)
420 notes.addtitleditem(currentsection, title, paragraphs)
420 notes.addtitleditem(currentsection, title, paragraphs)
421 else:
421 else:
422 raise error.Abort(_('unsupported section type for %s') % title)
422 raise error.Abort(_('unsupported section type for %s') % title)
423
423
424 return notes
424 return notes
425
425
426 def serializenotes(sections, notes):
426 def serializenotes(sections, notes):
427 """Serialize release notes from parsed fragments and notes.
427 """Serialize release notes from parsed fragments and notes.
428
428
429 This function essentially takes the output of ``parsenotesfromrevisions()``
429 This function essentially takes the output of ``parsenotesfromrevisions()``
430 and ``parserelnotesfile()`` and produces output combining the 2.
430 and ``parserelnotesfile()`` and produces output combining the 2.
431 """
431 """
432 lines = []
432 lines = []
433
433
434 for sectionname, sectiontitle in sections:
434 for sectionname, sectiontitle in sections:
435 if sectionname not in notes:
435 if sectionname not in notes:
436 continue
436 continue
437
437
438 lines.append(sectiontitle)
438 lines.append(sectiontitle)
439 lines.append('=' * len(sectiontitle))
439 lines.append('=' * len(sectiontitle))
440 lines.append('')
440 lines.append('')
441
441
442 # First pass to emit sub-sections.
442 # First pass to emit sub-sections.
443 for title, paragraphs in notes.titledforsection(sectionname):
443 for title, paragraphs in notes.titledforsection(sectionname):
444 lines.append(title)
444 lines.append(title)
445 lines.append('-' * len(title))
445 lines.append('-' * len(title))
446 lines.append('')
446 lines.append('')
447
447
448 for i, para in enumerate(paragraphs):
448 for i, para in enumerate(paragraphs):
449 if i:
449 if i:
450 lines.append('')
450 lines.append('')
451 lines.extend(stringutil.wrap(' '.join(para),
451 lines.extend(stringutil.wrap(' '.join(para),
452 width=78).splitlines())
452 width=78).splitlines())
453
453
454 lines.append('')
454 lines.append('')
455
455
456 # Second pass to emit bullet list items.
456 # Second pass to emit bullet list items.
457
457
458 # If the section has titled and non-titled items, we can't
458 # If the section has titled and non-titled items, we can't
459 # simply emit the bullet list because it would appear to come
459 # simply emit the bullet list because it would appear to come
460 # from the last title/section. So, we emit a new sub-section
460 # from the last title/section. So, we emit a new sub-section
461 # for the non-titled items.
461 # for the non-titled items.
462 nontitled = notes.nontitledforsection(sectionname)
462 nontitled = notes.nontitledforsection(sectionname)
463 if notes.titledforsection(sectionname) and nontitled:
463 if notes.titledforsection(sectionname) and nontitled:
464 # TODO make configurable.
464 # TODO make configurable.
465 lines.append(BULLET_SECTION)
465 lines.append(BULLET_SECTION)
466 lines.append('-' * len(BULLET_SECTION))
466 lines.append('-' * len(BULLET_SECTION))
467 lines.append('')
467 lines.append('')
468
468
469 for paragraphs in nontitled:
469 for paragraphs in nontitled:
470 lines.extend(stringutil.wrap(' '.join(paragraphs[0]),
470 lines.extend(stringutil.wrap(' '.join(paragraphs[0]),
471 width=78,
471 width=78,
472 initindent='* ',
472 initindent='* ',
473 hangindent=' ').splitlines())
473 hangindent=' ').splitlines())
474
474
475 for para in paragraphs[1:]:
475 for para in paragraphs[1:]:
476 lines.append('')
476 lines.append('')
477 lines.extend(stringutil.wrap(' '.join(para),
477 lines.extend(stringutil.wrap(' '.join(para),
478 width=78,
478 width=78,
479 initindent=' ',
479 initindent=' ',
480 hangindent=' ').splitlines())
480 hangindent=' ').splitlines())
481
481
482 lines.append('')
482 lines.append('')
483
483
484 if lines and lines[-1]:
484 if lines and lines[-1]:
485 lines.append('')
485 lines.append('')
486
486
487 return '\n'.join(lines)
487 return '\n'.join(lines)
488
488
489 @command('releasenotes',
489 @command('releasenotes',
490 [('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
490 [('r', 'rev', '', _('revisions to process for release notes'), _('REV')),
491 ('c', 'check', False, _('checks for validity of admonitions (if any)'),
491 ('c', 'check', False, _('checks for validity of admonitions (if any)'),
492 _('REV')),
492 _('REV')),
493 ('l', 'list', False, _('list the available admonitions with their title'),
493 ('l', 'list', False, _('list the available admonitions with their title'),
494 None)],
494 None)],
495 _('hg releasenotes [-r REV] [-c] FILE'),
495 _('hg releasenotes [-r REV] [-c] FILE'),
496 helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
496 helpcategory=command.CATEGORY_CHANGE_NAVIGATION)
497 def releasenotes(ui, repo, file_=None, **opts):
497 def releasenotes(ui, repo, file_=None, **opts):
498 """parse release notes from commit messages into an output file
498 """parse release notes from commit messages into an output file
499
499
500 Given an output file and set of revisions, this command will parse commit
500 Given an output file and set of revisions, this command will parse commit
501 messages for release notes then add them to the output file.
501 messages for release notes then add them to the output file.
502
502
503 Release notes are defined in commit messages as ReStructuredText
503 Release notes are defined in commit messages as ReStructuredText
504 directives. These have the form::
504 directives. These have the form::
505
505
506 .. directive:: title
506 .. directive:: title
507
507
508 content
508 content
509
509
510 Each ``directive`` maps to an output section in a generated release notes
510 Each ``directive`` maps to an output section in a generated release notes
511 file, which itself is ReStructuredText. For example, the ``.. feature::``
511 file, which itself is ReStructuredText. For example, the ``.. feature::``
512 directive would map to a ``New Features`` section.
512 directive would map to a ``New Features`` section.
513
513
514 Release note directives can be either short-form or long-form. In short-
514 Release note directives can be either short-form or long-form. In short-
515 form, ``title`` is omitted and the release note is rendered as a bullet
515 form, ``title`` is omitted and the release note is rendered as a bullet
516 list. In long form, a sub-section with the title ``title`` is added to the
516 list. In long form, a sub-section with the title ``title`` is added to the
517 section.
517 section.
518
518
519 The ``FILE`` argument controls the output file to write gathered release
519 The ``FILE`` argument controls the output file to write gathered release
520 notes to. The format of the file is::
520 notes to. The format of the file is::
521
521
522 Section 1
522 Section 1
523 =========
523 =========
524
524
525 ...
525 ...
526
526
527 Section 2
527 Section 2
528 =========
528 =========
529
529
530 ...
530 ...
531
531
532 Only sections with defined release notes are emitted.
532 Only sections with defined release notes are emitted.
533
533
534 If a section only has short-form notes, it will consist of bullet list::
534 If a section only has short-form notes, it will consist of bullet list::
535
535
536 Section
536 Section
537 =======
537 =======
538
538
539 * Release note 1
539 * Release note 1
540 * Release note 2
540 * Release note 2
541
541
542 If a section has long-form notes, sub-sections will be emitted::
542 If a section has long-form notes, sub-sections will be emitted::
543
543
544 Section
544 Section
545 =======
545 =======
546
546
547 Note 1 Title
547 Note 1 Title
548 ------------
548 ------------
549
549
550 Description of the first long-form note.
550 Description of the first long-form note.
551
551
552 Note 2 Title
552 Note 2 Title
553 ------------
553 ------------
554
554
555 Description of the second long-form note.
555 Description of the second long-form note.
556
556
557 If the ``FILE`` argument points to an existing file, that file will be
557 If the ``FILE`` argument points to an existing file, that file will be
558 parsed for release notes having the format that would be generated by this
558 parsed for release notes having the format that would be generated by this
559 command. The notes from the processed commit messages will be *merged*
559 command. The notes from the processed commit messages will be *merged*
560 into this parsed set.
560 into this parsed set.
561
561
562 During release notes merging:
562 During release notes merging:
563
563
564 * Duplicate items are automatically ignored
564 * Duplicate items are automatically ignored
565 * Items that are different are automatically ignored if the similarity is
565 * Items that are different are automatically ignored if the similarity is
566 greater than a threshold.
566 greater than a threshold.
567
567
568 This means that the release notes file can be updated independently from
568 This means that the release notes file can be updated independently from
569 this command and changes should not be lost when running this command on
569 this command and changes should not be lost when running this command on
570 that file. A particular use case for this is to tweak the wording of a
570 that file. A particular use case for this is to tweak the wording of a
571 release note after it has been added to the release notes file.
571 release note after it has been added to the release notes file.
572
572
573 The -c/--check option checks the commit message for invalid admonitions.
573 The -c/--check option checks the commit message for invalid admonitions.
574
574
575 The -l/--list option, presents the user with a list of existing available
575 The -l/--list option, presents the user with a list of existing available
576 admonitions along with their title. This also includes the custom
576 admonitions along with their title. This also includes the custom
577 admonitions (if any).
577 admonitions (if any).
578 """
578 """
579
579
580 opts = pycompat.byteskwargs(opts)
580 opts = pycompat.byteskwargs(opts)
581 sections = releasenotessections(ui, repo)
581 sections = releasenotessections(ui, repo)
582
582
583 listflag = opts.get('list')
583 listflag = opts.get('list')
584
584
585 if listflag and opts.get('rev'):
585 if listflag and opts.get('rev'):
586 raise error.Abort(_('cannot use both \'--list\' and \'--rev\''))
586 raise error.Abort(_('cannot use both \'--list\' and \'--rev\''))
587 if listflag and opts.get('check'):
587 if listflag and opts.get('check'):
588 raise error.Abort(_('cannot use both \'--list\' and \'--check\''))
588 raise error.Abort(_('cannot use both \'--list\' and \'--check\''))
589
589
590 if listflag:
590 if listflag:
591 return _getadmonitionlist(ui, sections)
591 return _getadmonitionlist(ui, sections)
592
592
593 rev = opts.get('rev')
593 rev = opts.get('rev')
594 revs = scmutil.revrange(repo, [rev or 'not public()'])
594 revs = scmutil.revrange(repo, [rev or 'not public()'])
595 if opts.get('check'):
595 if opts.get('check'):
596 return checkadmonitions(ui, repo, sections.names(), revs)
596 return checkadmonitions(ui, repo, sections.names(), revs)
597
597
598 incoming = parsenotesfromrevisions(repo, sections.names(), revs)
598 incoming = parsenotesfromrevisions(repo, sections.names(), revs)
599
599
600 if file_ is None:
600 if file_ is None:
601 ui.pager('releasenotes')
601 ui.pager('releasenotes')
602 return ui.write(serializenotes(sections, incoming))
602 return ui.write(serializenotes(sections, incoming))
603
603
604 try:
604 try:
605 with open(file_, 'rb') as fh:
605 with open(file_, 'rb') as fh:
606 notes = parsereleasenotesfile(sections, fh.read())
606 notes = parsereleasenotesfile(sections, fh.read())
607 except IOError as e:
607 except IOError as e:
608 if e.errno != errno.ENOENT:
608 if e.errno != errno.ENOENT:
609 raise
609 raise
610
610
611 notes = parsedreleasenotes()
611 notes = parsedreleasenotes()
612
612
613 notes.merge(ui, incoming)
613 notes.merge(ui, incoming)
614
614
615 with open(file_, 'wb') as fh:
615 with open(file_, 'wb') as fh:
616 fh.write(serializenotes(sections, notes))
616 fh.write(serializenotes(sections, notes))
617
617
618 @command('debugparsereleasenotes', norepo=True)
618 @command('debugparsereleasenotes', norepo=True)
619 def debugparsereleasenotes(ui, path, repo=None):
619 def debugparsereleasenotes(ui, path, repo=None):
620 """parse release notes and print resulting data structure"""
620 """parse release notes and print resulting data structure"""
621 if path == '-':
621 if path == '-':
622 text = pycompat.stdin.read()
622 text = pycompat.stdin.read()
623 else:
623 else:
624 with open(path, 'rb') as fh:
624 with open(path, 'rb') as fh:
625 text = fh.read()
625 text = fh.read()
626
626
627 sections = releasenotessections(ui, repo)
627 sections = releasenotessections(ui, repo)
628
628
629 notes = parsereleasenotesfile(sections, text)
629 notes = parsereleasenotesfile(sections, text)
630
630
631 for section in notes:
631 for section in notes:
632 ui.write(_('section: %s\n') % section)
632 ui.write(_('section: %s\n') % section)
633 for title, paragraphs in notes.titledforsection(section):
633 for title, paragraphs in notes.titledforsection(section):
634 ui.write(_(' subsection: %s\n') % title)
634 ui.write(_(' subsection: %s\n') % title)
635 for para in paragraphs:
635 for para in paragraphs:
636 ui.write(_(' paragraph: %s\n') % ' '.join(para))
636 ui.write(_(' paragraph: %s\n') % ' '.join(para))
637
637
638 for paragraphs in notes.nontitledforsection(section):
638 for paragraphs in notes.nontitledforsection(section):
639 ui.write(_(' bullet point:\n'))
639 ui.write(_(' bullet point:\n'))
640 for para in paragraphs:
640 for para in paragraphs:
641 ui.write(_(' paragraph: %s\n') % ' '.join(para))
641 ui.write(_(' paragraph: %s\n') % ' '.join(para))
@@ -1,533 +1,533
1 # utility for color output for Mercurial commands
1 # utility for color output for Mercurial commands
2 #
2 #
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> and other
3 # Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> and other
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import re
10 import re
11
11
12 from .i18n import _
12 from .i18n import _
13
13
14 from . import (
14 from . import (
15 encoding,
15 encoding,
16 pycompat,
16 pycompat,
17 )
17 )
18
18
19 from .utils import (
19 from .utils import (
20 stringutil,
20 stringutil,
21 )
21 )
22
22
23 try:
23 try:
24 import curses
24 import curses
25 # Mapping from effect name to terminfo attribute name (or raw code) or
25 # Mapping from effect name to terminfo attribute name (or raw code) or
26 # color number. This will also force-load the curses module.
26 # color number. This will also force-load the curses module.
27 _baseterminfoparams = {
27 _baseterminfoparams = {
28 'none': (True, 'sgr0', ''),
28 'none': (True, 'sgr0', ''),
29 'standout': (True, 'smso', ''),
29 'standout': (True, 'smso', ''),
30 'underline': (True, 'smul', ''),
30 'underline': (True, 'smul', ''),
31 'reverse': (True, 'rev', ''),
31 'reverse': (True, 'rev', ''),
32 'inverse': (True, 'rev', ''),
32 'inverse': (True, 'rev', ''),
33 'blink': (True, 'blink', ''),
33 'blink': (True, 'blink', ''),
34 'dim': (True, 'dim', ''),
34 'dim': (True, 'dim', ''),
35 'bold': (True, 'bold', ''),
35 'bold': (True, 'bold', ''),
36 'invisible': (True, 'invis', ''),
36 'invisible': (True, 'invis', ''),
37 'italic': (True, 'sitm', ''),
37 'italic': (True, 'sitm', ''),
38 'black': (False, curses.COLOR_BLACK, ''),
38 'black': (False, curses.COLOR_BLACK, ''),
39 'red': (False, curses.COLOR_RED, ''),
39 'red': (False, curses.COLOR_RED, ''),
40 'green': (False, curses.COLOR_GREEN, ''),
40 'green': (False, curses.COLOR_GREEN, ''),
41 'yellow': (False, curses.COLOR_YELLOW, ''),
41 'yellow': (False, curses.COLOR_YELLOW, ''),
42 'blue': (False, curses.COLOR_BLUE, ''),
42 'blue': (False, curses.COLOR_BLUE, ''),
43 'magenta': (False, curses.COLOR_MAGENTA, ''),
43 'magenta': (False, curses.COLOR_MAGENTA, ''),
44 'cyan': (False, curses.COLOR_CYAN, ''),
44 'cyan': (False, curses.COLOR_CYAN, ''),
45 'white': (False, curses.COLOR_WHITE, ''),
45 'white': (False, curses.COLOR_WHITE, ''),
46 }
46 }
47 except ImportError:
47 except ImportError:
48 curses = None
48 curses = None
49 _baseterminfoparams = {}
49 _baseterminfoparams = {}
50
50
51 # start and stop parameters for effects
51 # start and stop parameters for effects
52 _effects = {
52 _effects = {
53 'none': 0,
53 'none': 0,
54 'black': 30,
54 'black': 30,
55 'red': 31,
55 'red': 31,
56 'green': 32,
56 'green': 32,
57 'yellow': 33,
57 'yellow': 33,
58 'blue': 34,
58 'blue': 34,
59 'magenta': 35,
59 'magenta': 35,
60 'cyan': 36,
60 'cyan': 36,
61 'white': 37,
61 'white': 37,
62 'bold': 1,
62 'bold': 1,
63 'italic': 3,
63 'italic': 3,
64 'underline': 4,
64 'underline': 4,
65 'inverse': 7,
65 'inverse': 7,
66 'dim': 2,
66 'dim': 2,
67 'black_background': 40,
67 'black_background': 40,
68 'red_background': 41,
68 'red_background': 41,
69 'green_background': 42,
69 'green_background': 42,
70 'yellow_background': 43,
70 'yellow_background': 43,
71 'blue_background': 44,
71 'blue_background': 44,
72 'purple_background': 45,
72 'purple_background': 45,
73 'cyan_background': 46,
73 'cyan_background': 46,
74 'white_background': 47,
74 'white_background': 47,
75 }
75 }
76
76
77 _defaultstyles = {
77 _defaultstyles = {
78 'grep.match': 'red bold',
78 'grep.match': 'red bold',
79 'grep.linenumber': 'green',
79 'grep.linenumber': 'green',
80 'grep.rev': 'green',
80 'grep.rev': 'green',
81 'grep.change': 'green',
81 'grep.change': 'green',
82 'grep.sep': 'cyan',
82 'grep.sep': 'cyan',
83 'grep.filename': 'magenta',
83 'grep.filename': 'magenta',
84 'grep.user': 'magenta',
84 'grep.user': 'magenta',
85 'grep.date': 'magenta',
85 'grep.date': 'magenta',
86 'bookmarks.active': 'green',
86 'bookmarks.active': 'green',
87 'branches.active': 'none',
87 'branches.active': 'none',
88 'branches.closed': 'black bold',
88 'branches.closed': 'black bold',
89 'branches.current': 'green',
89 'branches.current': 'green',
90 'branches.inactive': 'none',
90 'branches.inactive': 'none',
91 'diff.changed': 'white',
91 'diff.changed': 'white',
92 'diff.deleted': 'red',
92 'diff.deleted': 'red',
93 'diff.deleted.changed': 'red bold underline',
93 'diff.deleted.changed': 'red bold underline',
94 'diff.deleted.unchanged': 'red',
94 'diff.deleted.unchanged': 'red',
95 'diff.diffline': 'bold',
95 'diff.diffline': 'bold',
96 'diff.extended': 'cyan bold',
96 'diff.extended': 'cyan bold',
97 'diff.file_a': 'red bold',
97 'diff.file_a': 'red bold',
98 'diff.file_b': 'green bold',
98 'diff.file_b': 'green bold',
99 'diff.hunk': 'magenta',
99 'diff.hunk': 'magenta',
100 'diff.inserted': 'green',
100 'diff.inserted': 'green',
101 'diff.inserted.changed': 'green bold underline',
101 'diff.inserted.changed': 'green bold underline',
102 'diff.inserted.unchanged': 'green',
102 'diff.inserted.unchanged': 'green',
103 'diff.tab': '',
103 'diff.tab': '',
104 'diff.trailingwhitespace': 'bold red_background',
104 'diff.trailingwhitespace': 'bold red_background',
105 'changeset.public': '',
105 'changeset.public': '',
106 'changeset.draft': '',
106 'changeset.draft': '',
107 'changeset.secret': '',
107 'changeset.secret': '',
108 'diffstat.deleted': 'red',
108 'diffstat.deleted': 'red',
109 'diffstat.inserted': 'green',
109 'diffstat.inserted': 'green',
110 'formatvariant.name.mismatchconfig': 'red',
110 'formatvariant.name.mismatchconfig': 'red',
111 'formatvariant.name.mismatchdefault': 'yellow',
111 'formatvariant.name.mismatchdefault': 'yellow',
112 'formatvariant.name.uptodate': 'green',
112 'formatvariant.name.uptodate': 'green',
113 'formatvariant.repo.mismatchconfig': 'red',
113 'formatvariant.repo.mismatchconfig': 'red',
114 'formatvariant.repo.mismatchdefault': 'yellow',
114 'formatvariant.repo.mismatchdefault': 'yellow',
115 'formatvariant.repo.uptodate': 'green',
115 'formatvariant.repo.uptodate': 'green',
116 'formatvariant.config.special': 'yellow',
116 'formatvariant.config.special': 'yellow',
117 'formatvariant.config.default': 'green',
117 'formatvariant.config.default': 'green',
118 'formatvariant.default': '',
118 'formatvariant.default': '',
119 'histedit.remaining': 'red bold',
119 'histedit.remaining': 'red bold',
120 'ui.addremove.added': 'green',
120 'ui.addremove.added': 'green',
121 'ui.addremove.removed': 'red',
121 'ui.addremove.removed': 'red',
122 'ui.error': 'red',
122 'ui.error': 'red',
123 'ui.prompt': 'yellow',
123 'ui.prompt': 'yellow',
124 'log.changeset': 'yellow',
124 'log.changeset': 'yellow',
125 'patchbomb.finalsummary': '',
125 'patchbomb.finalsummary': '',
126 'patchbomb.from': 'magenta',
126 'patchbomb.from': 'magenta',
127 'patchbomb.to': 'cyan',
127 'patchbomb.to': 'cyan',
128 'patchbomb.subject': 'green',
128 'patchbomb.subject': 'green',
129 'patchbomb.diffstats': '',
129 'patchbomb.diffstats': '',
130 'rebase.rebased': 'blue',
130 'rebase.rebased': 'blue',
131 'rebase.remaining': 'red bold',
131 'rebase.remaining': 'red bold',
132 'resolve.resolved': 'green bold',
132 'resolve.resolved': 'green bold',
133 'resolve.unresolved': 'red bold',
133 'resolve.unresolved': 'red bold',
134 'shelve.age': 'cyan',
134 'shelve.age': 'cyan',
135 'shelve.newest': 'green bold',
135 'shelve.newest': 'green bold',
136 'shelve.name': 'blue bold',
136 'shelve.name': 'blue bold',
137 'status.added': 'green bold',
137 'status.added': 'green bold',
138 'status.clean': 'none',
138 'status.clean': 'none',
139 'status.copied': 'none',
139 'status.copied': 'none',
140 'status.deleted': 'cyan bold underline',
140 'status.deleted': 'cyan bold underline',
141 'status.ignored': 'black bold',
141 'status.ignored': 'black bold',
142 'status.modified': 'blue bold',
142 'status.modified': 'blue bold',
143 'status.removed': 'red bold',
143 'status.removed': 'red bold',
144 'status.unknown': 'magenta bold underline',
144 'status.unknown': 'magenta bold underline',
145 'tags.normal': 'green',
145 'tags.normal': 'green',
146 'tags.local': 'black bold',
146 'tags.local': 'black bold',
147 }
147 }
148
148
149 def loadcolortable(ui, extname, colortable):
149 def loadcolortable(ui, extname, colortable):
150 _defaultstyles.update(colortable)
150 _defaultstyles.update(colortable)
151
151
152 def _terminfosetup(ui, mode, formatted):
152 def _terminfosetup(ui, mode, formatted):
153 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
153 '''Initialize terminfo data and the terminal if we're in terminfo mode.'''
154
154
155 # If we failed to load curses, we go ahead and return.
155 # If we failed to load curses, we go ahead and return.
156 if curses is None:
156 if curses is None:
157 return
157 return
158 # Otherwise, see what the config file says.
158 # Otherwise, see what the config file says.
159 if mode not in ('auto', 'terminfo'):
159 if mode not in ('auto', 'terminfo'):
160 return
160 return
161 ui._terminfoparams.update(_baseterminfoparams)
161 ui._terminfoparams.update(_baseterminfoparams)
162
162
163 for key, val in ui.configitems('color'):
163 for key, val in ui.configitems('color'):
164 if key.startswith('color.'):
164 if key.startswith('color.'):
165 newval = (False, int(val), '')
165 newval = (False, int(val), '')
166 ui._terminfoparams[key[6:]] = newval
166 ui._terminfoparams[key[6:]] = newval
167 elif key.startswith('terminfo.'):
167 elif key.startswith('terminfo.'):
168 newval = (True, '', val.replace('\\E', '\x1b'))
168 newval = (True, '', val.replace('\\E', '\x1b'))
169 ui._terminfoparams[key[9:]] = newval
169 ui._terminfoparams[key[9:]] = newval
170 try:
170 try:
171 curses.setupterm()
171 curses.setupterm()
172 except curses.error:
172 except curses.error:
173 ui._terminfoparams.clear()
173 ui._terminfoparams.clear()
174 return
174 return
175
175
176 for key, (b, e, c) in ui._terminfoparams.copy().items():
176 for key, (b, e, c) in ui._terminfoparams.copy().items():
177 if not b:
177 if not b:
178 continue
178 continue
179 if not c and not curses.tigetstr(pycompat.sysstr(e)):
179 if not c and not curses.tigetstr(pycompat.sysstr(e)):
180 # Most terminals don't support dim, invis, etc, so don't be
180 # Most terminals don't support dim, invis, etc, so don't be
181 # noisy and use ui.debug().
181 # noisy and use ui.debug().
182 ui.debug("no terminfo entry for %s\n" % e)
182 ui.debug("no terminfo entry for %s\n" % e)
183 del ui._terminfoparams[key]
183 del ui._terminfoparams[key]
184 if not curses.tigetstr(r'setaf') or not curses.tigetstr(r'setab'):
184 if not curses.tigetstr(r'setaf') or not curses.tigetstr(r'setab'):
185 # Only warn about missing terminfo entries if we explicitly asked for
185 # Only warn about missing terminfo entries if we explicitly asked for
186 # terminfo mode and we're in a formatted terminal.
186 # terminfo mode and we're in a formatted terminal.
187 if mode == "terminfo" and formatted:
187 if mode == "terminfo" and formatted:
188 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
188 ui.warn(_("no terminfo entry for setab/setaf: reverting to "
189 "ECMA-48 color\n"))
189 "ECMA-48 color\n"))
190 ui._terminfoparams.clear()
190 ui._terminfoparams.clear()
191
191
192 def setup(ui):
192 def setup(ui):
193 """configure color on a ui
193 """configure color on a ui
194
194
195 That function both set the colormode for the ui object and read
195 That function both set the colormode for the ui object and read
196 the configuration looking for custom colors and effect definitions."""
196 the configuration looking for custom colors and effect definitions."""
197 mode = _modesetup(ui)
197 mode = _modesetup(ui)
198 ui._colormode = mode
198 ui._colormode = mode
199 if mode and mode != 'debug':
199 if mode and mode != 'debug':
200 configstyles(ui)
200 configstyles(ui)
201
201
202 def _modesetup(ui):
202 def _modesetup(ui):
203 if ui.plain('color'):
203 if ui.plain('color'):
204 return None
204 return None
205 config = ui.config('ui', 'color')
205 config = ui.config('ui', 'color')
206 if config == 'debug':
206 if config == 'debug':
207 return 'debug'
207 return 'debug'
208
208
209 auto = (config == 'auto')
209 auto = (config == 'auto')
210 always = False
210 always = False
211 if not auto and stringutil.parsebool(config):
211 if not auto and stringutil.parsebool(config):
212 # We want the config to behave like a boolean, "on" is actually auto,
212 # We want the config to behave like a boolean, "on" is actually auto,
213 # but "always" value is treated as a special case to reduce confusion.
213 # but "always" value is treated as a special case to reduce confusion.
214 if ui.configsource('ui', 'color') == '--color' or config == 'always':
214 if ui.configsource('ui', 'color') == '--color' or config == 'always':
215 always = True
215 always = True
216 else:
216 else:
217 auto = True
217 auto = True
218
218
219 if not always and not auto:
219 if not always and not auto:
220 return None
220 return None
221
221
222 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
222 formatted = (always or (encoding.environ.get('TERM') != 'dumb'
223 and ui.formatted()))
223 and ui.formatted()))
224
224
225 mode = ui.config('color', 'mode')
225 mode = ui.config('color', 'mode')
226
226
227 # If pager is active, color.pagermode overrides color.mode.
227 # If pager is active, color.pagermode overrides color.mode.
228 if getattr(ui, 'pageractive', False):
228 if getattr(ui, 'pageractive', False):
229 mode = ui.config('color', 'pagermode', mode)
229 mode = ui.config('color', 'pagermode', mode)
230
230
231 realmode = mode
231 realmode = mode
232 if pycompat.iswindows:
232 if pycompat.iswindows:
233 from . import win32
233 from . import win32
234
234
235 term = encoding.environ.get('TERM')
235 term = encoding.environ.get('TERM')
236 # TERM won't be defined in a vanilla cmd.exe environment.
236 # TERM won't be defined in a vanilla cmd.exe environment.
237
237
238 # UNIX-like environments on Windows such as Cygwin and MSYS will
238 # UNIX-like environments on Windows such as Cygwin and MSYS will
239 # set TERM. They appear to make a best effort attempt at setting it
239 # set TERM. They appear to make a best effort attempt at setting it
240 # to something appropriate. However, not all environments with TERM
240 # to something appropriate. However, not all environments with TERM
241 # defined support ANSI.
241 # defined support ANSI.
242 ansienviron = term and 'xterm' in term
242 ansienviron = term and 'xterm' in term
243
243
244 if mode == 'auto':
244 if mode == 'auto':
245 # Since "ansi" could result in terminal gibberish, we error on the
245 # Since "ansi" could result in terminal gibberish, we error on the
246 # side of selecting "win32". However, if w32effects is not defined,
246 # side of selecting "win32". However, if w32effects is not defined,
247 # we almost certainly don't support "win32", so don't even try.
247 # we almost certainly don't support "win32", so don't even try.
248 # w32effects is not populated when stdout is redirected, so checking
248 # w32effects is not populated when stdout is redirected, so checking
249 # it first avoids win32 calls in a state known to error out.
249 # it first avoids win32 calls in a state known to error out.
250 if ansienviron or not w32effects or win32.enablevtmode():
250 if ansienviron or not w32effects or win32.enablevtmode():
251 realmode = 'ansi'
251 realmode = 'ansi'
252 else:
252 else:
253 realmode = 'win32'
253 realmode = 'win32'
254 # An empty w32effects is a clue that stdout is redirected, and thus
254 # An empty w32effects is a clue that stdout is redirected, and thus
255 # cannot enable VT mode.
255 # cannot enable VT mode.
256 elif mode == 'ansi' and w32effects and not ansienviron:
256 elif mode == 'ansi' and w32effects and not ansienviron:
257 win32.enablevtmode()
257 win32.enablevtmode()
258 elif mode == 'auto':
258 elif mode == 'auto':
259 realmode = 'ansi'
259 realmode = 'ansi'
260
260
261 def modewarn():
261 def modewarn():
262 # only warn if color.mode was explicitly set and we're in
262 # only warn if color.mode was explicitly set and we're in
263 # a formatted terminal
263 # a formatted terminal
264 if mode == realmode and formatted:
264 if mode == realmode and formatted:
265 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
265 ui.warn(_('warning: failed to set color mode to %s\n') % mode)
266
266
267 if realmode == 'win32':
267 if realmode == 'win32':
268 ui._terminfoparams.clear()
268 ui._terminfoparams.clear()
269 if not w32effects:
269 if not w32effects:
270 modewarn()
270 modewarn()
271 return None
271 return None
272 elif realmode == 'ansi':
272 elif realmode == 'ansi':
273 ui._terminfoparams.clear()
273 ui._terminfoparams.clear()
274 elif realmode == 'terminfo':
274 elif realmode == 'terminfo':
275 _terminfosetup(ui, mode, formatted)
275 _terminfosetup(ui, mode, formatted)
276 if not ui._terminfoparams:
276 if not ui._terminfoparams:
277 ## FIXME Shouldn't we return None in this case too?
277 ## FIXME Shouldn't we return None in this case too?
278 modewarn()
278 modewarn()
279 realmode = 'ansi'
279 realmode = 'ansi'
280 else:
280 else:
281 return None
281 return None
282
282
283 if always or (auto and formatted):
283 if always or (auto and formatted):
284 return realmode
284 return realmode
285 return None
285 return None
286
286
287 def configstyles(ui):
287 def configstyles(ui):
288 ui._styles.update(_defaultstyles)
288 ui._styles.update(_defaultstyles)
289 for status, cfgeffects in ui.configitems('color'):
289 for status, cfgeffects in ui.configitems('color'):
290 if '.' not in status or status.startswith(('color.', 'terminfo.')):
290 if '.' not in status or status.startswith(('color.', 'terminfo.')):
291 continue
291 continue
292 cfgeffects = ui.configlist('color', status)
292 cfgeffects = ui.configlist('color', status)
293 if cfgeffects:
293 if cfgeffects:
294 good = []
294 good = []
295 for e in cfgeffects:
295 for e in cfgeffects:
296 if valideffect(ui, e):
296 if valideffect(ui, e):
297 good.append(e)
297 good.append(e)
298 else:
298 else:
299 ui.warn(_("ignoring unknown color/effect %s "
299 ui.warn(_("ignoring unknown color/effect %s "
300 "(configured in color.%s)\n")
300 "(configured in color.%s)\n")
301 % (stringutil.pprint(e), status))
301 % (stringutil.pprint(e), status))
302 ui._styles[status] = ' '.join(good)
302 ui._styles[status] = ' '.join(good)
303
303
304 def _activeeffects(ui):
304 def _activeeffects(ui):
305 '''Return the effects map for the color mode set on the ui.'''
305 '''Return the effects map for the color mode set on the ui.'''
306 if ui._colormode == 'win32':
306 if ui._colormode == 'win32':
307 return w32effects
307 return w32effects
308 elif ui._colormode is not None:
308 elif ui._colormode is not None:
309 return _effects
309 return _effects
310 return {}
310 return {}
311
311
312 def valideffect(ui, effect):
312 def valideffect(ui, effect):
313 'Determine if the effect is valid or not.'
313 'Determine if the effect is valid or not.'
314 return ((not ui._terminfoparams and effect in _activeeffects(ui))
314 return ((not ui._terminfoparams and effect in _activeeffects(ui))
315 or (effect in ui._terminfoparams
315 or (effect in ui._terminfoparams
316 or effect[:-11] in ui._terminfoparams))
316 or effect[:-11] in ui._terminfoparams))
317
317
318 def _effect_str(ui, effect):
318 def _effect_str(ui, effect):
319 '''Helper function for render_effects().'''
319 '''Helper function for render_effects().'''
320
320
321 bg = False
321 bg = False
322 if effect.endswith('_background'):
322 if effect.endswith('_background'):
323 bg = True
323 bg = True
324 effect = effect[:-11]
324 effect = effect[:-11]
325 try:
325 try:
326 attr, val, termcode = ui._terminfoparams[effect]
326 attr, val, termcode = ui._terminfoparams[effect]
327 except KeyError:
327 except KeyError:
328 return ''
328 return ''
329 if attr:
329 if attr:
330 if termcode:
330 if termcode:
331 return termcode
331 return termcode
332 else:
332 else:
333 return curses.tigetstr(pycompat.sysstr(val))
333 return curses.tigetstr(pycompat.sysstr(val))
334 elif bg:
334 elif bg:
335 return curses.tparm(curses.tigetstr(r'setab'), val)
335 return curses.tparm(curses.tigetstr(r'setab'), val)
336 else:
336 else:
337 return curses.tparm(curses.tigetstr(r'setaf'), val)
337 return curses.tparm(curses.tigetstr(r'setaf'), val)
338
338
339 def _mergeeffects(text, start, stop):
339 def _mergeeffects(text, start, stop):
340 """Insert start sequence at every occurrence of stop sequence
340 """Insert start sequence at every occurrence of stop sequence
341
341
342 >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
342 >>> s = _mergeeffects(b'cyan', b'[C]', b'|')
343 >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
343 >>> s = _mergeeffects(s + b'yellow', b'[Y]', b'|')
344 >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
344 >>> s = _mergeeffects(b'ma' + s + b'genta', b'[M]', b'|')
345 >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
345 >>> s = _mergeeffects(b'red' + s, b'[R]', b'|')
346 >>> s
346 >>> s
347 '[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
347 '[R]red[M]ma[Y][C]cyan|[R][M][Y]yellow|[R][M]genta|'
348 """
348 """
349 parts = []
349 parts = []
350 for t in text.split(stop):
350 for t in text.split(stop):
351 if not t:
351 if not t:
352 continue
352 continue
353 parts.extend([start, t, stop])
353 parts.extend([start, t, stop])
354 return ''.join(parts)
354 return ''.join(parts)
355
355
356 def _render_effects(ui, text, effects):
356 def _render_effects(ui, text, effects):
357 'Wrap text in commands to turn on each effect.'
357 'Wrap text in commands to turn on each effect.'
358 if not text:
358 if not text:
359 return text
359 return text
360 if ui._terminfoparams:
360 if ui._terminfoparams:
361 start = ''.join(_effect_str(ui, effect)
361 start = ''.join(_effect_str(ui, effect)
362 for effect in ['none'] + effects.split())
362 for effect in ['none'] + effects.split())
363 stop = _effect_str(ui, 'none')
363 stop = _effect_str(ui, 'none')
364 else:
364 else:
365 activeeffects = _activeeffects(ui)
365 activeeffects = _activeeffects(ui)
366 start = [pycompat.bytestr(activeeffects[e])
366 start = [pycompat.bytestr(activeeffects[e])
367 for e in ['none'] + effects.split()]
367 for e in ['none'] + effects.split()]
368 start = '\033[' + ';'.join(start) + 'm'
368 start = '\033[' + ';'.join(start) + 'm'
369 stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
369 stop = '\033[' + pycompat.bytestr(activeeffects['none']) + 'm'
370 return _mergeeffects(text, start, stop)
370 return _mergeeffects(text, start, stop)
371
371
372 _ansieffectre = re.compile(br'\x1b\[[0-9;]*m')
372 _ansieffectre = re.compile(br'\x1b\[[0-9;]*m')
373
373
374 def stripeffects(text):
374 def stripeffects(text):
375 """Strip ANSI control codes which could be inserted by colorlabel()"""
375 """Strip ANSI control codes which could be inserted by colorlabel()"""
376 return _ansieffectre.sub('', text)
376 return _ansieffectre.sub('', text)
377
377
378 def colorlabel(ui, msg, label):
378 def colorlabel(ui, msg, label):
379 """add color control code according to the mode"""
379 """add color control code according to the mode"""
380 if ui._colormode == 'debug':
380 if ui._colormode == 'debug':
381 if label and msg:
381 if label and msg:
382 if msg.endswith('\n'):
382 if msg.endswith('\n'):
383 msg = "[%s|%s]\n" % (label, msg[:-1])
383 msg = "[%s|%s]\n" % (label, msg[:-1])
384 else:
384 else:
385 msg = "[%s|%s]" % (label, msg)
385 msg = "[%s|%s]" % (label, msg)
386 elif ui._colormode is not None:
386 elif ui._colormode is not None:
387 effects = []
387 effects = []
388 for l in label.split():
388 for l in label.split():
389 s = ui._styles.get(l, '')
389 s = ui._styles.get(l, '')
390 if s:
390 if s:
391 effects.append(s)
391 effects.append(s)
392 elif valideffect(ui, l):
392 elif valideffect(ui, l):
393 effects.append(l)
393 effects.append(l)
394 effects = ' '.join(effects)
394 effects = ' '.join(effects)
395 if effects:
395 if effects:
396 msg = '\n'.join([_render_effects(ui, line, effects)
396 msg = '\n'.join([_render_effects(ui, line, effects)
397 for line in msg.split('\n')])
397 for line in msg.split('\n')])
398 return msg
398 return msg
399
399
# Windows console color support. On non-Windows platforms w32effects stays
# None and the ANSI code path is used instead.
w32effects = None
if pycompat.iswindows:
    import ctypes

    _kernel32 = ctypes.windll.kernel32

    _WORD = ctypes.c_ushort

    _INVALID_HANDLE_VALUE = -1

    # ctypes mirrors of the Win32 console structures
    class _COORD(ctypes.Structure):
        _fields_ = [(r'X', ctypes.c_short),
                    (r'Y', ctypes.c_short)]

    class _SMALL_RECT(ctypes.Structure):
        _fields_ = [(r'Left', ctypes.c_short),
                    (r'Top', ctypes.c_short),
                    (r'Right', ctypes.c_short),
                    (r'Bottom', ctypes.c_short)]

    class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
        _fields_ = [(r'dwSize', _COORD),
                    (r'dwCursorPosition', _COORD),
                    (r'wAttributes', _WORD),
                    (r'srWindow', _SMALL_RECT),
                    (r'dwMaximumWindowSize', _COORD)]

    _STD_OUTPUT_HANDLE = 0xfffffff5 # (DWORD)-11
    _STD_ERROR_HANDLE = 0xfffffff4 # (DWORD)-12

    # console character attribute bits
    _FOREGROUND_BLUE = 0x0001
    _FOREGROUND_GREEN = 0x0002
    _FOREGROUND_RED = 0x0004
    _FOREGROUND_INTENSITY = 0x0008

    _BACKGROUND_BLUE = 0x0010
    _BACKGROUND_GREEN = 0x0020
    _BACKGROUND_RED = 0x0040
    _BACKGROUND_INTENSITY = 0x0080

    _COMMON_LVB_REVERSE_VIDEO = 0x4000
    _COMMON_LVB_UNDERSCORE = 0x8000

    # map effect names to console attribute bits, see
    # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
    w32effects = {
        'none': -1,
        'black': 0,
        'red': _FOREGROUND_RED,
        'green': _FOREGROUND_GREEN,
        'yellow': _FOREGROUND_RED | _FOREGROUND_GREEN,
        'blue': _FOREGROUND_BLUE,
        'magenta': _FOREGROUND_BLUE | _FOREGROUND_RED,
        'cyan': _FOREGROUND_BLUE | _FOREGROUND_GREEN,
        'white': _FOREGROUND_RED | _FOREGROUND_GREEN | _FOREGROUND_BLUE,
        'bold': _FOREGROUND_INTENSITY,
        'black_background': 0x100, # unused value > 0x0f
        'red_background': _BACKGROUND_RED,
        'green_background': _BACKGROUND_GREEN,
        'yellow_background': _BACKGROUND_RED | _BACKGROUND_GREEN,
        'blue_background': _BACKGROUND_BLUE,
        'purple_background': _BACKGROUND_BLUE | _BACKGROUND_RED,
        'cyan_background': _BACKGROUND_BLUE | _BACKGROUND_GREEN,
        'white_background': (_BACKGROUND_RED | _BACKGROUND_GREEN |
                             _BACKGROUND_BLUE),
        'bold_background': _BACKGROUND_INTENSITY,
        'underline': _COMMON_LVB_UNDERSCORE, # double-byte charsets only
        'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
    }

    # attribute bits that are OR-ed in rather than replacing a color field
    passthrough = {_FOREGROUND_INTENSITY,
                   _BACKGROUND_INTENSITY,
                   _COMMON_LVB_UNDERSCORE,
                   _COMMON_LVB_REVERSE_VIDEO}

    stdout = _kernel32.GetStdHandle(
                  _STD_OUTPUT_HANDLE) # don't close the handle returned
    if stdout is None or stdout == _INVALID_HANDLE_VALUE:
        w32effects = None
    else:
        csbi = _CONSOLE_SCREEN_BUFFER_INFO()
        if not _kernel32.GetConsoleScreenBufferInfo(
                stdout, ctypes.byref(csbi)):
            # stdout may not support GetConsoleScreenBufferInfo()
            # when called from subprocess or redirected
            w32effects = None
        else:
            # remember the attributes active at startup so they can be
            # restored after every colored write
            origattr = csbi.wAttributes
            ansire = re.compile(br'\033\[([^m]*)m([^\033]*)(.*)',
                                re.MULTILINE | re.DOTALL)

    def win32print(ui, writefunc, text, **opts):
        """Emulate ANSI color codes by rewriting console attributes.

        ``text`` may contain ANSI-style escapes; each escape is translated
        into a SetConsoleTextAttribute() call and the plain text between
        escapes is written with ``writefunc``.
        """
        label = opts.get(r'label', '')
        attr = origattr

        def mapcolor(val, attr):
            # fold one numeric effect value into the current attribute word
            if val == -1:
                return origattr
            elif val in passthrough:
                return attr | val
            elif val > 0x0f:
                # background color: replace the background nibble only
                return (val & 0x70) | (attr & 0x8f)
            else:
                # foreground color: replace the foreground nibble only
                return (val & 0x07) | (attr & 0xf8)

        # determine console attributes based on labels
        for l in label.split():
            style = ui._styles.get(l, '')
            for effect in style.split():
                try:
                    attr = mapcolor(w32effects[effect], attr)
                except KeyError:
                    # w32effects could not have certain attributes so we skip
                    # them if not found
                    pass
        # hack to ensure regexp finds data
        if not text.startswith(b'\033['):
            text = b'\033[m' + text

        # Look for ANSI-like codes embedded in text
        m = re.match(ansire, text)

        try:
            while m:
                for sattr in m.group(1).split(b';'):
                    if sattr:
                        attr = mapcolor(int(sattr), attr)
                ui.flush()
                _kernel32.SetConsoleTextAttribute(stdout, attr)
                writefunc(m.group(2))
                m = re.match(ansire, m.group(3))
        finally:
            # Explicitly reset original attributes
            ui.flush()
            _kernel32.SetConsoleTextAttribute(stdout, origattr)
533 _kernel32.SetConsoleTextAttribute(stdout, origattr)
@@ -1,2862 +1,2862
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from __future__ import absolute_import, print_function
9 from __future__ import absolute_import, print_function
10
10
11 import collections
11 import collections
12 import contextlib
12 import contextlib
13 import copy
13 import copy
14 import email
14 import email
15 import errno
15 import errno
16 import hashlib
16 import hashlib
17 import os
17 import os
18 import posixpath
18 import posixpath
19 import re
19 import re
20 import shutil
20 import shutil
21 import zlib
21 import zlib
22
22
23 from .i18n import _
23 from .i18n import _
24 from .node import (
24 from .node import (
25 hex,
25 hex,
26 short,
26 short,
27 )
27 )
28 from . import (
28 from . import (
29 copies,
29 copies,
30 diffhelper,
30 diffhelper,
31 diffutil,
31 diffutil,
32 encoding,
32 encoding,
33 error,
33 error,
34 mail,
34 mail,
35 match as matchmod,
35 match as matchmod,
36 mdiff,
36 mdiff,
37 pathutil,
37 pathutil,
38 pycompat,
38 pycompat,
39 scmutil,
39 scmutil,
40 similar,
40 similar,
41 util,
41 util,
42 vfs as vfsmod,
42 vfs as vfsmod,
43 )
43 )
44 from .utils import (
44 from .utils import (
45 dateutil,
45 dateutil,
46 procutil,
46 procutil,
47 stringutil,
47 stringutil,
48 )
48 )
49
49
50 stringio = util.stringio
50 stringio = util.stringio
51
51
52 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
52 gitre = re.compile(br'diff --git a/(.*) b/(.*)')
53 tabsplitter = re.compile(br'(\t+|[^\t]+)')
53 tabsplitter = re.compile(br'(\t+|[^\t]+)')
54 wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
54 wordsplitter = re.compile(br'(\t+| +|[a-zA-Z0-9_\x80-\xff]+|'
55 b'[^ \ta-zA-Z0-9_\x80-\xff])')
55 b'[^ \ta-zA-Z0-9_\x80-\xff])')
56
56
57 PatchError = error.PatchError
57 PatchError = error.PatchError
58
58
59 # public functions
59 # public functions
60
60
def split(stream):
    '''return an iterator of individual patches from a stream'''

    def isheader(line, inheader):
        # an RFC-2822-style "Key: value" line, or a continuation line
        # while we are already inside a header block
        if inheader and line.startswith((' ', '\t')):
            # continuation
            return True
        if line.startswith((' ', '-', '+')):
            # diff line - don't check for header pattern in there
            return False
        parts = line.split(': ', 1)
        return len(parts) == 2 and ' ' not in parts[0]

    def chunk(lines):
        # wrap accumulated lines back into a file-like object
        return stringio(''.join(lines))

    def hgsplit(stream, cur):
        # split on "# HG changeset patch" markers
        inheader = True

        for line in stream:
            if not line.strip():
                inheader = False
            if not inheader and line.startswith('# HG changeset patch'):
                yield chunk(cur)
                cur = []
                inheader = True

            cur.append(line)

        if cur:
            yield chunk(cur)

    def mboxsplit(stream, cur):
        # mbox format: every "From " line starts a new message, whose
        # body (minus the separator line) is split recursively
        for line in stream:
            if line.startswith('From '):
                for c in split(chunk(cur[1:])):
                    yield c
                cur = []

            cur.append(line)

        if cur:
            for c in split(chunk(cur[1:])):
                yield c

    def mimesplit(stream, cur):
        def msgfp(m):
            # flatten a message object into a file-like object
            fp = stringio()
            g = email.Generator.Generator(fp, mangle_from_=False)
            g.flatten(m)
            fp.seek(0)
            return fp

        for line in stream:
            cur.append(line)
        c = chunk(cur)

        m = mail.parse(c)
        if not m.is_multipart():
            yield msgfp(m)
        else:
            # only yield parts that may plausibly contain a patch
            ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
            for part in m.walk():
                ct = part.get_content_type()
                if ct not in ok_types:
                    continue
                yield msgfp(part)

    def headersplit(stream, cur):
        # split whenever a fresh header block begins
        inheader = False

        for line in stream:
            if not inheader and isheader(line, inheader):
                yield chunk(cur)
                cur = []
                inheader = True
            if inheader and not isheader(line, inheader):
                inheader = False

            cur.append(line)

        if cur:
            yield chunk(cur)

    def remainder(cur):
        # whole stream is a single plain patch
        yield chunk(cur)

    class fiter(object):
        # adapt a readline-only object into an iterator
        def __init__(self, fp):
            self.fp = fp

        def __iter__(self):
            return self

        def next(self):
            l = self.fp.readline()
            if not l:
                raise StopIteration
            return l

        __next__ = next

    inheader = False
    cur = []

    mimeheaders = ['content-type']

    if not util.safehasattr(stream, 'next'):
        # http responses, for example, have readline but not next
        stream = fiter(stream)

    # sniff the first lines to decide which splitting strategy applies
    for line in stream:
        cur.append(line)
        if line.startswith('# HG changeset patch'):
            return hgsplit(stream, cur)
        elif line.startswith('From '):
            return mboxsplit(stream, cur)
        elif isheader(line, inheader):
            inheader = True
            if line.split(':', 1)[0].lower() in mimeheaders:
                # let email parser handle this
                return mimesplit(stream, cur)
        elif line.startswith('--- ') and inheader:
            # No evil headers seen by diff start, split by hand
            return headersplit(stream, cur)
        # Not enough info, keep reading

    # if we are here, we have a very plain patch
    return remainder(cur)
189
189
## Some facility for extensible patch parsing:
# list of pairs ("header to match", "data key")
patchheadermap = [
    ('Date', 'date'),
    ('Branch', 'branch'),
    ('Node ID', 'nodeid'),
]
196
196
@contextlib.contextmanager
def extract(ui, fileobj):
    '''extract patch from data read from fileobj.

    patch can be a normal patch or contained in an email message.

    return a dictionary. Standard keys are:
      - filename,
      - message,
      - user,
      - date,
      - branch,
      - node,
      - p1,
      - p2.
    Any item can be missing from the dictionary. If filename is missing,
    fileobj did not contain a patch. Caller must unlink filename when done.'''

    # the extracted patch lives in a temporary file whose lifetime is
    # bounded by this context manager
    fd, tmpname = pycompat.mkstemp(prefix='hg-patch-')
    tmpfp = os.fdopen(fd, r'wb')
    try:
        yield _extract(ui, fileobj, tmpname, tmpfp)
    finally:
        tmpfp.close()
        os.unlink(tmpname)
222
222
def _extract(ui, fileobj, tmpname, tmpfp):
    """Parse a patch (possibly wrapped in an email) out of ``fileobj``.

    The diff body is written to ``tmpfp``; metadata (user, message,
    parents, ...) is collected into the returned dictionary. ``tmpname``
    is recorded under 'filename' when at least one diff was found.
    """
    # attempt to detect the start of a patch
    # (this heuristic is borrowed from quilt)
    diffre = re.compile(br'^(?:Index:[ \t]|diff[ \t]-|RCS file: |'
                        br'retrieving revision [0-9]+(\.[0-9]+)*$|'
                        br'---[ \t].*?^\+\+\+[ \t]|'
                        br'\*\*\*[ \t].*?^---[ \t])',
                        re.MULTILINE | re.DOTALL)

    data = {}

    msg = mail.parse(fileobj)

    subject = msg[r'Subject'] and mail.headdecode(msg[r'Subject'])
    data['user'] = msg[r'From'] and mail.headdecode(msg[r'From'])
    if not subject and not data['user']:
        # Not an email, restore parsed headers if any
        subject = '\n'.join(': '.join(map(encoding.strtolocal, h))
                            for h in msg.items()) + '\n'

    # should try to parse msg['Date']
    parents = []

    if subject:
        # strip a leading "[PATCH n/m]"-style tag and unfold the header
        if subject.startswith('[PATCH'):
            pend = subject.find(']')
            if pend >= 0:
                subject = subject[pend + 1:].lstrip()
        subject = re.sub(br'\n[ \t]+', ' ', subject)
        ui.debug('Subject: %s\n' % subject)
    if data['user']:
        ui.debug('From: %s\n' % data['user'])
    diffs_seen = 0
    ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
    message = ''
    for part in msg.walk():
        content_type = pycompat.bytestr(part.get_content_type())
        ui.debug('Content-Type: %s\n' % content_type)
        if content_type not in ok_types:
            continue
        payload = part.get_payload(decode=True)
        m = diffre.search(payload)
        if m:
            hgpatch = False
            hgpatchheader = False
            ignoretext = False

            ui.debug('found patch at byte %d\n' % m.start(0))
            diffs_seen += 1
            cfp = stringio()
            # everything before the diff start is commit message material,
            # except for "# HG changeset patch" header lines and anything
            # after a lone '---' separator
            for line in payload[:m.start(0)].splitlines():
                if line.startswith('# HG changeset patch') and not hgpatch:
                    ui.debug('patch generated by hg export\n')
                    hgpatch = True
                    hgpatchheader = True
                    # drop earlier commit message content
                    cfp.seek(0)
                    cfp.truncate()
                    subject = None
                elif hgpatchheader:
                    if line.startswith('# User '):
                        data['user'] = line[7:]
                        ui.debug('From: %s\n' % data['user'])
                    elif line.startswith("# Parent "):
                        parents.append(line[9:].lstrip())
                    elif line.startswith("# "):
                        # extensible "# Header value" metadata lines
                        for header, key in patchheadermap:
                            prefix = '# %s ' % header
                            if line.startswith(prefix):
                                data[key] = line[len(prefix):]
                    else:
                        hgpatchheader = False
                elif line == '---':
                    ignoretext = True
                if not hgpatchheader and not ignoretext:
                    cfp.write(line)
                    cfp.write('\n')
            message = cfp.getvalue()
            if tmpfp:
                tmpfp.write(payload)
                if not payload.endswith('\n'):
                    tmpfp.write('\n')
        elif not diffs_seen and message and content_type == 'text/plain':
            # plain-text parts before the first diff extend the message
            message += '\n' + payload

    if subject and not message.startswith(subject):
        message = '%s\n%s' % (subject, message)
    data['message'] = message
    tmpfp.close()
    if parents:
        data['p1'] = parents.pop(0)
        if parents:
            data['p2'] = parents.pop(0)

    if diffs_seen:
        data['filename'] = tmpname

    return data
322
322
class patchmeta(object):
    """Patched file metadata

    'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
    or COPY. 'path' is patched file path. 'oldpath' is set to the
    origin file when 'op' is either COPY or RENAME, None otherwise. If
    file mode is changed, 'mode' is a tuple (islink, isexec) where
    'islink' is True if the file is a symlink and 'isexec' is True if
    the file is executable. Otherwise, 'mode' is None.
    """
    def __init__(self, path):
        self.path = path
        self.oldpath = None
        self.mode = None
        self.op = 'MODIFY'
        self.binary = False

    def setmode(self, mode):
        # decompose a git-style octal mode into (islink, isexec) bits
        self.mode = (mode & 0o20000, mode & 0o100)

    def copy(self):
        other = patchmeta(self.path)
        other.oldpath = self.oldpath
        other.mode = self.mode
        other.op = self.op
        other.binary = self.binary
        return other

    def _ispatchinga(self, afile):
        # the "a/" side of the diff must name our source file
        if afile == '/dev/null':
            return self.op == 'ADD'
        source = self.oldpath or self.path
        return afile == 'a/' + source

    def _ispatchingb(self, bfile):
        # the "b/" side of the diff must name our destination file
        if bfile == '/dev/null':
            return self.op == 'DELETE'
        return bfile == 'b/' + self.path

    def ispatching(self, afile, bfile):
        return self._ispatchinga(afile) and self._ispatchingb(bfile)

    def __repr__(self):
        return "<patchmeta %s %r>" % (self.op, self.path)
368
368
def readgitpatch(lr):
    """extract git-style metadata about patches from <patchname>"""

    # Filter patch for git information
    gp = None
    gitpatches = []
    for line in lr:
        line = line.rstrip(' \r\n')
        if line.startswith('diff --git a/'):
            # a new file entry begins; flush any entry in progress
            m = gitre.match(line)
            if m:
                if gp:
                    gitpatches.append(gp)
                dest = m.group(2)
                gp = patchmeta(dest)
        elif gp:
            if line.startswith('--- '):
                # start of the unified diff body ends the metadata header
                gitpatches.append(gp)
                gp = None
                continue
            if line.startswith('rename from '):
                gp.op = 'RENAME'
                gp.oldpath = line[12:]
            elif line.startswith('rename to '):
                gp.path = line[10:]
            elif line.startswith('copy from '):
                gp.op = 'COPY'
                gp.oldpath = line[10:]
            elif line.startswith('copy to '):
                gp.path = line[8:]
            elif line.startswith('deleted file'):
                gp.op = 'DELETE'
            elif line.startswith('new file mode '):
                gp.op = 'ADD'
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('new mode '):
                gp.setmode(int(line[-6:], 8))
            elif line.startswith('GIT binary patch'):
                gp.binary = True
    if gp:
        gitpatches.append(gp)

    return gitpatches
412
412
class linereader(object):
    """File-like wrapper that allows pushing lines back into the stream.

    Pushed-back lines are returned (FIFO) before any further data is read
    from the underlying file object.
    """
    def __init__(self, fp):
        self.fp = fp
        self.buf = []

    def push(self, line):
        # a None push is a no-op so callers can push an optional lookahead
        if line is not None:
            self.buf.append(line)

    def readline(self):
        if self.buf:
            return self.buf.pop(0)
        return self.fp.readline()

    def __iter__(self):
        # iterate until EOF (readline returning '')
        return iter(self.readline, '')
432
432
class abstractbackend(object):
    """Interface for objects a patch can be applied to.

    Concrete subclasses define how file data is read, written and removed
    on the patching target (filesystem, working copy, repository, ...).
    """
    def __init__(self, ui):
        self.ui = ui

    def getfile(self, fname):
        """Return target file data and flags as a (data, (islink,
        isexec)) tuple. Data is None if file is missing/deleted.
        """
        raise NotImplementedError

    def setfile(self, fname, data, mode, copysource):
        """Write data to target file fname and set its mode. mode is a
        (islink, isexec) tuple. If data is None, the file content should
        be left unchanged. If the file is modified after being copied,
        copysource is set to the original file name.
        """
        raise NotImplementedError

    def unlink(self, fname):
        """Unlink target file."""
        raise NotImplementedError

    def writerej(self, fname, failed, total, lines):
        """Write rejected lines for fname. total is the number of hunks
        which failed to apply and total the total number of hunks for this
        files.
        """
        # optional hook: doing nothing is a valid implementation

    def exists(self, fname):
        raise NotImplementedError

    def close(self):
        raise NotImplementedError
466
466
class fsbackend(abstractbackend):
    """Patch backend operating directly on files under ``basedir``."""

    def __init__(self, ui, basedir):
        super(fsbackend, self).__init__(ui)
        self.opener = vfsmod.vfs(basedir)

    def getfile(self, fname):
        # symlinks are returned as their target with the link flag set
        if self.opener.islink(fname):
            return (self.opener.readlink(fname), (True, False))

        isexec = False
        try:
            isexec = self.opener.lstat(fname).st_mode & 0o100 != 0
        except OSError as e:
            # a missing file simply has no exec bit; anything else is fatal
            if e.errno != errno.ENOENT:
                raise
        try:
            return (self.opener.read(fname), (False, isexec))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            return None, None

    def setfile(self, fname, data, mode, copysource):
        islink, isexec = mode
        if data is None:
            # content unchanged: only the flags need updating
            self.opener.setflags(fname, islink, isexec)
            return
        if islink:
            self.opener.symlink(data, fname)
        else:
            self.opener.write(fname, data)
            if isexec:
                self.opener.setflags(fname, False, True)

    def unlink(self, fname):
        rmdir = self.ui.configbool('experimental', 'removeemptydirs')
        self.opener.unlinkpath(fname, ignoremissing=True, rmdir=rmdir)

    def writerej(self, fname, failed, total, lines):
        # save the hunks that failed to apply next to the target file
        fname = fname + ".rej"
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        return self.opener.lexists(fname)
516
516
class workingbackend(fsbackend):
    """fsbackend variant operating on the working directory of a repo;
    it records adds, removes and copies so the dirstate can be updated
    when the backend is closed."""

    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # similarity threshold passed to marktouched() for rename guessing
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # refuse to patch files present on disk but unknown to the dirstate
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        # flush the recorded adds/removes/copies into the dirstate and
        # return the sorted list of touched files
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
            for f in self.removed:
                if f not in self.repo.dirstate:
                    # File was deleted and no longer belongs to the
                    # dirstate, it was probably marked added then
                    # deleted, and should not be considered by
                    # marktouched().
                    changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)
560
560
class filestore(object):
    """Temporary store for patched file contents.

    Entries are held in memory until an in-memory budget (``maxsize``,
    4 MiB by default; negative disables spilling) is exhausted, after
    which new entries are written to files in a temporary directory.
    close() removes that directory, if one was created.
    """

    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        # None means "use the default budget"
        self.maxsize = 4 * (2 ** 20) if maxsize is None else maxsize
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        """Record (data, mode, copied) for fname, spilling to disk when
        the in-memory budget would be exceeded."""
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
            return
        # over budget: spill this entry to a file in a temp directory
        if self.opener is None:
            root = pycompat.mkdtemp(prefix='hg-patch-')
            self.opener = vfsmod.vfs(root)
        # Avoid filename issues with these simple names
        fn = '%d' % self.created
        self.opener.write(fn, data)
        self.created += 1
        self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied) for fname, or (None, None, None)
        when it was never stored."""
        try:
            return self.data[fname]
        except KeyError:
            pass
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        # drop the spill directory, if any entries were written out
        if self.opener:
            shutil.rmtree(self.opener.base)
597
597
class repobackend(abstractbackend):
    """Patch backend applying hunks against a changectx, accumulating the
    results in a filestore instead of touching the working directory."""

    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        # base context the patch is applied against
        self.ctx = ctx
        # filestore collecting the patched contents
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            # (None, None) signals a missing file to callers
            return None, None
        flags = fctx.flags()
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # mode-only change: reuse the base content
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # set of all files touched by the patch
        return self.changed | self.removed
639
639
# Hunk descriptor patterns. Raw byte strings are required: with plain
# strings, escapes like \d trigger DeprecationWarning on Python 3.6+
# and will eventually become syntax errors.
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
unidesc = re.compile(br'@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
# context-diff range lines: "--- start,len ----" / "*** start,len ****"
contextdesc = re.compile(br'(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
# supported end-of-line normalization modes
eolmodes = ['strict', 'crlf', 'lf', 'auto']
644
644
class patchfile(object):
    """State and logic for applying the hunks of one patched file.

    Holds the target file's lines, tracks offset/skew accumulated by
    previously applied hunks, performs fuzzy matching, and collects
    rejected hunks for writing to a .rej file.
    """

    def __init__(self, ui, gp, backend, store, eolmode='strict'):
        self.fname = gp.path
        self.eolmode = eolmode
        # EOL style detected from the first line of existing content
        self.eol = None
        self.backend = backend
        self.ui = ui
        self.lines = []
        self.exists = False
        self.missing = True
        self.mode = gp.mode
        self.copysource = gp.oldpath
        self.create = gp.op in ('ADD', 'COPY', 'RENAME')
        self.remove = gp.op == 'DELETE'
        # copies read their base content from the store, not the backend
        if self.copysource is None:
            data, mode = backend.getfile(self.fname)
        else:
            data, mode = store.getfile(self.copysource)[:2]
        if data is not None:
            self.exists = self.copysource is None or backend.exists(self.fname)
            self.missing = False
            if data:
                self.lines = mdiff.splitnewlines(data)
            if self.mode is None:
                self.mode = mode
            if self.lines:
                # Normalize line endings
                if self.lines[0].endswith('\r\n'):
                    self.eol = '\r\n'
                elif self.lines[0].endswith('\n'):
                    self.eol = '\n'
                if eolmode != 'strict':
                    nlines = []
                    for l in self.lines:
                        if l.endswith('\r\n'):
                            l = l[:-2] + '\n'
                        nlines.append(l)
                    self.lines = nlines
        else:
            if self.create:
                self.missing = False
            if self.mode is None:
                self.mode = (False, False)
        if self.missing:
            self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
            self.ui.warn(_("(use '--prefix' to apply patch relative to the "
                           "current directory)\n"))

        # line-content -> [line numbers], built lazily for fuzzy matching
        self.hash = {}
        self.dirty = 0
        # net line delta introduced by already-applied hunks
        self.offset = 0
        # displacement found for the previous hunk, reused as a hint
        self.skew = 0
        # hunks that failed to apply
        self.rej = []
        self.fileprinted = False
        self.printfile(False)
        self.hunks = 0

    def writelines(self, fname, lines, mode):
        """Write lines through the backend, restoring the requested EOL
        style when not in strict mode."""
        if self.eolmode == 'auto':
            eol = self.eol
        elif self.eolmode == 'crlf':
            eol = '\r\n'
        else:
            eol = '\n'

        if self.eolmode != 'strict' and eol and eol != '\n':
            rawlines = []
            for l in lines:
                if l and l.endswith('\n'):
                    l = l[:-1] + eol
                rawlines.append(l)
            lines = rawlines

        self.backend.setfile(fname, ''.join(lines), mode, self.copysource)

    def printfile(self, warn):
        """Announce the file being patched, at most once (unless only
        noted so far and a warning is later required)."""
        if self.fileprinted:
            return
        if warn or self.ui.verbose:
            self.fileprinted = True
        s = _("patching file %s\n") % self.fname
        if warn:
            self.ui.warn(s)
        else:
            self.ui.note(s)

    def findlines(self, l, linenum):
        # looks through the hash and finds candidate lines. The
        # result is a list of line numbers sorted based on distance
        # from linenum

        cand = self.hash.get(l, [])
        if len(cand) > 1:
            # resort our list of potentials forward then back.
            cand.sort(key=lambda x: abs(x - linenum))
        return cand

    def write_rej(self):
        # our rejects are a little different from patch(1). This always
        # creates rejects in the same form as the original patch. A file
        # header is inserted so that you can run the reject through patch again
        # without having to type the filename.
        if not self.rej:
            return
        base = os.path.basename(self.fname)
        lines = ["--- %s\n+++ %s\n" % (base, base)]
        for x in self.rej:
            for l in x.hunk:
                lines.append(l)
                if l[-1:] != '\n':
                    lines.append("\n\ No newline at end of file\n")
        self.backend.writerej(self.fname, len(self.rej), self.hunks, lines)

    def apply(self, h):
        """Apply hunk h to this file.

        Returns 0 on a clean apply, the fuzz amount when fuzzy matching
        was needed, or -1 when the hunk was rejected.
        """
        if not h.complete():
            raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
                             (h.number, h.desc, len(h.a), h.lena, len(h.b),
                              h.lenb))

        self.hunks += 1

        if self.missing:
            self.rej.append(h)
            return -1

        if self.exists and self.create:
            if self.copysource:
                self.ui.warn(_("cannot create %s: destination already "
                               "exists\n") % self.fname)
            else:
                self.ui.warn(_("file %s already exists\n") % self.fname)
            self.rej.append(h)
            return -1

        # binary hunks replace or delete the whole file
        if isinstance(h, binhunk):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                l = h.new(self.lines)
                self.lines[:] = l
                self.offset += len(l)
                self.dirty = True
            return 0

        horig = h
        if (self.eolmode in ('crlf', 'lf')
            or self.eolmode == 'auto' and self.eol):
            # If new eols are going to be normalized, then normalize
            # hunk data before patching. Otherwise, preserve input
            # line-endings.
            h = h.getnormalized()

        # fast case first, no offsets, no fuzz
        old, oldstart, new, newstart = h.fuzzit(0, False)
        oldstart += self.offset
        orig_start = oldstart
        # if there's skew we want to emit the "(offset %d lines)" even
        # when the hunk cleanly applies at start + skew, so skip the
        # fast case code
        if self.skew == 0 and diffhelper.testhunk(old, self.lines, oldstart):
            if self.remove:
                self.backend.unlink(self.fname)
            else:
                self.lines[oldstart:oldstart + len(old)] = new
                self.offset += len(new) - len(old)
                self.dirty = True
            return 0

        # ok, we couldn't match the hunk. Lets look for offsets and fuzz it
        self.hash = {}
        for x, s in enumerate(self.lines):
            self.hash.setdefault(s, []).append(x)

        for fuzzlen in pycompat.xrange(self.ui.configint("patch", "fuzz") + 1):
            for toponly in [True, False]:
                old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
                oldstart = oldstart + self.offset + self.skew
                oldstart = min(oldstart, len(self.lines))
                if old:
                    cand = self.findlines(old[0][1:], oldstart)
                else:
                    # Only adding lines with no or fuzzed context, just
                    # take the skew in account
                    cand = [oldstart]

                for l in cand:
                    if not old or diffhelper.testhunk(old, self.lines, l):
                        self.lines[l : l + len(old)] = new
                        self.offset += len(new) - len(old)
                        self.skew = l - orig_start
                        self.dirty = True
                        offset = l - orig_start - fuzzlen
                        if fuzzlen:
                            msg = _("Hunk #%d succeeded at %d "
                                    "with fuzz %d "
                                    "(offset %d lines).\n")
                            self.printfile(True)
                            self.ui.warn(msg %
                                         (h.number, l + 1, fuzzlen, offset))
                        else:
                            msg = _("Hunk #%d succeeded at %d "
                                    "(offset %d lines).\n")
                            self.ui.note(msg % (h.number, l + 1, offset))
                        return fuzzlen
        self.printfile(True)
        self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
        self.rej.append(horig)
        return -1

    def close(self):
        """Flush patched content and rejects; return the reject count."""
        if self.dirty:
            self.writelines(self.fname, self.lines, self.mode)
        self.write_rej()
        return len(self.rej)
860
860
class header(object):
    """patch header

    Wraps the header lines of one file's diff and answers questions
    about what kind of change they describe.
    """
    diffgit_re = re.compile('diff --git a/(.*) b/(.*)$')
    diff_re = re.compile('diff -r .* (.*)$')
    allhunks_re = re.compile('(?:index|deleted file) ')
    pretty_re = re.compile('(?:new file|deleted file) ')
    special_re = re.compile('(?:index|deleted|copy|rename) ')
    newfile_re = re.compile('(?:new file)')

    def __init__(self, header):
        self.header = header
        self.hunks = []

    def binary(self):
        """True when the header marks a binary (index) diff."""
        for line in self.header:
            if line.startswith('index '):
                return True
        return False

    def pretty(self, fp):
        """Write a human-oriented rendering of the header to fp."""
        for line in self.header:
            if line.startswith('index '):
                fp.write(_('this modifies a binary file (all or nothing)\n'))
                break
            if self.pretty_re.match(line):
                fp.write(line)
                if self.binary():
                    fp.write(_('this is a binary file\n'))
                break
            if line.startswith('---'):
                changedlines = sum(max(h.added, h.removed) for h in self.hunks)
                fp.write(_('%d hunks, %d lines changed\n') %
                         (len(self.hunks), changedlines))
                break
            fp.write(line)

    def write(self, fp):
        fp.write(''.join(self.header))

    def allhunks(self):
        """True when the change must be taken or left as a whole."""
        for line in self.header:
            if self.allhunks_re.match(line):
                return True
        return False

    def files(self):
        """Return the file name(s) involved, parsed from the first line."""
        m = self.diffgit_re.match(self.header[0])
        if not m:
            return self.diff_re.match(self.header[0]).groups()
        fromfile, tofile = m.groups()
        if fromfile == tofile:
            return [fromfile]
        return [fromfile, tofile]

    def filename(self):
        return self.files()[-1]

    def __repr__(self):
        return '<header %s>' % ' '.join(repr(f) for f in self.files())

    def isnewfile(self):
        return any(self.newfile_re.match(line) for line in self.header)

    def special(self):
        # Special files are shown only at the header level and not at the hunk
        # level for example a file that has been deleted is a special file.
        # The user cannot change the content of the operation, in the case of
        # the deleted file he has to take the deletion or not take it, he
        # cannot take some of it.
        # Newly added files are special if they are empty, they are not special
        # if they have some content as we want to be able to change it
        if self.isnewfile() and len(self.header) == 2:
            # empty new file: header only, no content to pick from
            return True
        return any(self.special_re.match(line) for line in self.header)
932
932
class recordhunk(object):
    """patch hunk

    XXX shouldn't we merge this with the other hunk class?
    """

    def __init__(self, header, fromline, toline, proc, before, hunk, after,
                 maxcontext=None):
        def _trim(ctxlines, fromtop=False):
            # Cap context at maxcontext lines; report how many were cut.
            if maxcontext is None:
                return 0, ctxlines
            excess = len(ctxlines) - maxcontext
            if excess <= 0:
                return 0, ctxlines
            if fromtop:
                return excess, ctxlines[excess:]
            return excess, ctxlines[:maxcontext]

        self.header = header
        trimedbefore, self.before = _trim(before, True)
        # shift the line numbers by the amount of leading context dropped
        self.fromline = fromline + trimedbefore
        self.toline = toline + trimedbefore
        _trimedafter, self.after = _trim(after, False)
        self.proc = proc
        self.hunk = hunk
        self.added, self.removed = self.countchanges(self.hunk)

    def __eq__(self, v):
        if not isinstance(v, recordhunk):
            return False
        return (v.hunk == self.hunk
                and v.proc == self.proc
                and self.fromline == v.fromline
                and self.header.files() == v.header.files())

    def __hash__(self):
        return hash((tuple(self.hunk),
                     tuple(self.header.files()),
                     self.fromline,
                     self.proc))

    def countchanges(self, hunk):
        """hunk -> (n+,n-)"""
        add = sum(1 for line in hunk if line.startswith('+'))
        rem = sum(1 for line in hunk if line.startswith('-'))
        return add, rem

    def reversehunk(self):
        """return another recordhunk which is the reverse of the hunk

        If this hunk is diff(A, B), the returned hunk is diff(B, A). To do
        that, swap fromline/toline and +/- signs while keep other things
        unchanged.
        """
        flip = {'+': '-', '-': '+', '\\': '\\'}
        reversed_lines = ['%s%s' % (flip[line[0:1]], line[1:])
                          for line in self.hunk]
        return recordhunk(self.header, self.toline, self.fromline, self.proc,
                          self.before, reversed_lines, self.after)

    def write(self, fp):
        delta = len(self.before) + len(self.after)
        # a trailing no-newline marker is not a real context line
        if self.after and self.after[-1] == '\\ No newline at end of file\n':
            delta -= 1
        fromlen = delta + self.removed
        tolen = delta + self.added
        atline = '@@ -%d,%d +%d,%d @@%s\n' % (
            self.fromline, fromlen, self.toline, tolen,
            self.proc and (' ' + self.proc))
        fp.write(atline)
        fp.write(''.join(self.before + self.hunk + self.after))

    pretty = write

    def filename(self):
        return self.header.filename()

    def __repr__(self):
        return '<hunk %r@%d>' % (self.filename(), self.fromline)
1011
1011
1012 def getmessages():
1012 def getmessages():
1013 return {
1013 return {
1014 'multiple': {
1014 'multiple': {
1015 'apply': _("apply change %d/%d to '%s'?"),
1015 'apply': _("apply change %d/%d to '%s'?"),
1016 'discard': _("discard change %d/%d to '%s'?"),
1016 'discard': _("discard change %d/%d to '%s'?"),
1017 'record': _("record change %d/%d to '%s'?"),
1017 'record': _("record change %d/%d to '%s'?"),
1018 },
1018 },
1019 'single': {
1019 'single': {
1020 'apply': _("apply this change to '%s'?"),
1020 'apply': _("apply this change to '%s'?"),
1021 'discard': _("discard this change to '%s'?"),
1021 'discard': _("discard this change to '%s'?"),
1022 'record': _("record this change to '%s'?"),
1022 'record': _("record this change to '%s'?"),
1023 },
1023 },
1024 'help': {
1024 'help': {
1025 'apply': _('[Ynesfdaq?]'
1025 'apply': _('[Ynesfdaq?]'
1026 '$$ &Yes, apply this change'
1026 '$$ &Yes, apply this change'
1027 '$$ &No, skip this change'
1027 '$$ &No, skip this change'
1028 '$$ &Edit this change manually'
1028 '$$ &Edit this change manually'
1029 '$$ &Skip remaining changes to this file'
1029 '$$ &Skip remaining changes to this file'
1030 '$$ Apply remaining changes to this &file'
1030 '$$ Apply remaining changes to this &file'
1031 '$$ &Done, skip remaining changes and files'
1031 '$$ &Done, skip remaining changes and files'
1032 '$$ Apply &all changes to all remaining files'
1032 '$$ Apply &all changes to all remaining files'
1033 '$$ &Quit, applying no changes'
1033 '$$ &Quit, applying no changes'
1034 '$$ &? (display help)'),
1034 '$$ &? (display help)'),
1035 'discard': _('[Ynesfdaq?]'
1035 'discard': _('[Ynesfdaq?]'
1036 '$$ &Yes, discard this change'
1036 '$$ &Yes, discard this change'
1037 '$$ &No, skip this change'
1037 '$$ &No, skip this change'
1038 '$$ &Edit this change manually'
1038 '$$ &Edit this change manually'
1039 '$$ &Skip remaining changes to this file'
1039 '$$ &Skip remaining changes to this file'
1040 '$$ Discard remaining changes to this &file'
1040 '$$ Discard remaining changes to this &file'
1041 '$$ &Done, skip remaining changes and files'
1041 '$$ &Done, skip remaining changes and files'
1042 '$$ Discard &all changes to all remaining files'
1042 '$$ Discard &all changes to all remaining files'
1043 '$$ &Quit, discarding no changes'
1043 '$$ &Quit, discarding no changes'
1044 '$$ &? (display help)'),
1044 '$$ &? (display help)'),
1045 'record': _('[Ynesfdaq?]'
1045 'record': _('[Ynesfdaq?]'
1046 '$$ &Yes, record this change'
1046 '$$ &Yes, record this change'
1047 '$$ &No, skip this change'
1047 '$$ &No, skip this change'
1048 '$$ &Edit this change manually'
1048 '$$ &Edit this change manually'
1049 '$$ &Skip remaining changes to this file'
1049 '$$ &Skip remaining changes to this file'
1050 '$$ Record remaining changes to this &file'
1050 '$$ Record remaining changes to this &file'
1051 '$$ &Done, skip remaining changes and files'
1051 '$$ &Done, skip remaining changes and files'
1052 '$$ Record &all changes to all remaining files'
1052 '$$ Record &all changes to all remaining files'
1053 '$$ &Quit, recording no changes'
1053 '$$ &Quit, recording no changes'
1054 '$$ &? (display help)'),
1054 '$$ &? (display help)'),
1055 }
1055 }
1056 }
1056 }
1057
1057
1058 def filterpatch(ui, headers, operation=None):
1058 def filterpatch(ui, headers, operation=None):
1059 """Interactively filter patch chunks into applied-only chunks"""
1059 """Interactively filter patch chunks into applied-only chunks"""
1060 messages = getmessages()
1060 messages = getmessages()
1061
1061
1062 if operation is None:
1062 if operation is None:
1063 operation = 'record'
1063 operation = 'record'
1064
1064
1065 def prompt(skipfile, skipall, query, chunk):
1065 def prompt(skipfile, skipall, query, chunk):
1066 """prompt query, and process base inputs
1066 """prompt query, and process base inputs
1067
1067
1068 - y/n for the rest of file
1068 - y/n for the rest of file
1069 - y/n for the rest
1069 - y/n for the rest
1070 - ? (help)
1070 - ? (help)
1071 - q (quit)
1071 - q (quit)
1072
1072
1073 Return True/False and possibly updated skipfile and skipall.
1073 Return True/False and possibly updated skipfile and skipall.
1074 """
1074 """
1075 newpatches = None
1075 newpatches = None
1076 if skipall is not None:
1076 if skipall is not None:
1077 return skipall, skipfile, skipall, newpatches
1077 return skipall, skipfile, skipall, newpatches
1078 if skipfile is not None:
1078 if skipfile is not None:
1079 return skipfile, skipfile, skipall, newpatches
1079 return skipfile, skipfile, skipall, newpatches
1080 while True:
1080 while True:
1081 resps = messages['help'][operation]
1081 resps = messages['help'][operation]
1082 r = ui.promptchoice("%s %s" % (query, resps))
1082 r = ui.promptchoice("%s %s" % (query, resps))
1083 ui.write("\n")
1083 ui.write("\n")
1084 if r == 8: # ?
1084 if r == 8: # ?
1085 for c, t in ui.extractchoices(resps)[1]:
1085 for c, t in ui.extractchoices(resps)[1]:
1086 ui.write('%s - %s\n' % (c, encoding.lower(t)))
1086 ui.write('%s - %s\n' % (c, encoding.lower(t)))
1087 continue
1087 continue
1088 elif r == 0: # yes
1088 elif r == 0: # yes
1089 ret = True
1089 ret = True
1090 elif r == 1: # no
1090 elif r == 1: # no
1091 ret = False
1091 ret = False
1092 elif r == 2: # Edit patch
1092 elif r == 2: # Edit patch
1093 if chunk is None:
1093 if chunk is None:
1094 ui.write(_('cannot edit patch for whole file'))
1094 ui.write(_('cannot edit patch for whole file'))
1095 ui.write("\n")
1095 ui.write("\n")
1096 continue
1096 continue
1097 if chunk.header.binary():
1097 if chunk.header.binary():
1098 ui.write(_('cannot edit patch for binary file'))
1098 ui.write(_('cannot edit patch for binary file'))
1099 ui.write("\n")
1099 ui.write("\n")
1100 continue
1100 continue
1101 # Patch comment based on the Git one (based on comment at end of
1101 # Patch comment based on the Git one (based on comment at end of
1102 # https://mercurial-scm.org/wiki/RecordExtension)
1102 # https://mercurial-scm.org/wiki/RecordExtension)
1103 phelp = '---' + _("""
1103 phelp = '---' + _("""
1104 To remove '-' lines, make them ' ' lines (context).
1104 To remove '-' lines, make them ' ' lines (context).
1105 To remove '+' lines, delete them.
1105 To remove '+' lines, delete them.
1106 Lines starting with # will be removed from the patch.
1106 Lines starting with # will be removed from the patch.
1107
1107
1108 If the patch applies cleanly, the edited hunk will immediately be
1108 If the patch applies cleanly, the edited hunk will immediately be
1109 added to the record list. If it does not apply cleanly, a rejects
1109 added to the record list. If it does not apply cleanly, a rejects
1110 file will be generated: you can use that when you try again. If
1110 file will be generated: you can use that when you try again. If
1111 all lines of the hunk are removed, then the edit is aborted and
1111 all lines of the hunk are removed, then the edit is aborted and
1112 the hunk is left unchanged.
1112 the hunk is left unchanged.
1113 """)
1113 """)
1114 (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
1114 (patchfd, patchfn) = pycompat.mkstemp(prefix="hg-editor-",
1115 suffix=".diff")
1115 suffix=".diff")
1116 ncpatchfp = None
1116 ncpatchfp = None
1117 try:
1117 try:
1118 # Write the initial patch
1118 # Write the initial patch
1119 f = util.nativeeolwriter(os.fdopen(patchfd, r'wb'))
1119 f = util.nativeeolwriter(os.fdopen(patchfd, r'wb'))
1120 chunk.header.write(f)
1120 chunk.header.write(f)
1121 chunk.write(f)
1121 chunk.write(f)
1122 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
1122 f.write('\n'.join(['# ' + i for i in phelp.splitlines()]))
1123 f.close()
1123 f.close()
1124 # Start the editor and wait for it to complete
1124 # Start the editor and wait for it to complete
1125 editor = ui.geteditor()
1125 editor = ui.geteditor()
1126 ret = ui.system("%s \"%s\"" % (editor, patchfn),
1126 ret = ui.system("%s \"%s\"" % (editor, patchfn),
1127 environ={'HGUSER': ui.username()},
1127 environ={'HGUSER': ui.username()},
1128 blockedtag='filterpatch')
1128 blockedtag='filterpatch')
1129 if ret != 0:
1129 if ret != 0:
1130 ui.warn(_("editor exited with exit code %d\n") % ret)
1130 ui.warn(_("editor exited with exit code %d\n") % ret)
1131 continue
1131 continue
1132 # Remove comment lines
1132 # Remove comment lines
1133 patchfp = open(patchfn, r'rb')
1133 patchfp = open(patchfn, r'rb')
1134 ncpatchfp = stringio()
1134 ncpatchfp = stringio()
1135 for line in util.iterfile(patchfp):
1135 for line in util.iterfile(patchfp):
1136 line = util.fromnativeeol(line)
1136 line = util.fromnativeeol(line)
1137 if not line.startswith('#'):
1137 if not line.startswith('#'):
1138 ncpatchfp.write(line)
1138 ncpatchfp.write(line)
1139 patchfp.close()
1139 patchfp.close()
1140 ncpatchfp.seek(0)
1140 ncpatchfp.seek(0)
1141 newpatches = parsepatch(ncpatchfp)
1141 newpatches = parsepatch(ncpatchfp)
1142 finally:
1142 finally:
1143 os.unlink(patchfn)
1143 os.unlink(patchfn)
1144 del ncpatchfp
1144 del ncpatchfp
1145 # Signal that the chunk shouldn't be applied as-is, but
1145 # Signal that the chunk shouldn't be applied as-is, but
1146 # provide the new patch to be used instead.
1146 # provide the new patch to be used instead.
1147 ret = False
1147 ret = False
1148 elif r == 3: # Skip
1148 elif r == 3: # Skip
1149 ret = skipfile = False
1149 ret = skipfile = False
1150 elif r == 4: # file (Record remaining)
1150 elif r == 4: # file (Record remaining)
1151 ret = skipfile = True
1151 ret = skipfile = True
1152 elif r == 5: # done, skip remaining
1152 elif r == 5: # done, skip remaining
1153 ret = skipall = False
1153 ret = skipall = False
1154 elif r == 6: # all
1154 elif r == 6: # all
1155 ret = skipall = True
1155 ret = skipall = True
1156 elif r == 7: # quit
1156 elif r == 7: # quit
1157 raise error.Abort(_('user quit'))
1157 raise error.Abort(_('user quit'))
1158 return ret, skipfile, skipall, newpatches
1158 return ret, skipfile, skipall, newpatches
1159
1159
1160 seen = set()
1160 seen = set()
1161 applied = {} # 'filename' -> [] of chunks
1161 applied = {} # 'filename' -> [] of chunks
1162 skipfile, skipall = None, None
1162 skipfile, skipall = None, None
1163 pos, total = 1, sum(len(h.hunks) for h in headers)
1163 pos, total = 1, sum(len(h.hunks) for h in headers)
1164 for h in headers:
1164 for h in headers:
1165 pos += len(h.hunks)
1165 pos += len(h.hunks)
1166 skipfile = None
1166 skipfile = None
1167 fixoffset = 0
1167 fixoffset = 0
1168 hdr = ''.join(h.header)
1168 hdr = ''.join(h.header)
1169 if hdr in seen:
1169 if hdr in seen:
1170 continue
1170 continue
1171 seen.add(hdr)
1171 seen.add(hdr)
1172 if skipall is None:
1172 if skipall is None:
1173 h.pretty(ui)
1173 h.pretty(ui)
1174 msg = (_('examine changes to %s?') %
1174 msg = (_('examine changes to %s?') %
1175 _(' and ').join("'%s'" % f for f in h.files()))
1175 _(' and ').join("'%s'" % f for f in h.files()))
1176 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
1176 r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None)
1177 if not r:
1177 if not r:
1178 continue
1178 continue
1179 applied[h.filename()] = [h]
1179 applied[h.filename()] = [h]
1180 if h.allhunks():
1180 if h.allhunks():
1181 applied[h.filename()] += h.hunks
1181 applied[h.filename()] += h.hunks
1182 continue
1182 continue
1183 for i, chunk in enumerate(h.hunks):
1183 for i, chunk in enumerate(h.hunks):
1184 if skipfile is None and skipall is None:
1184 if skipfile is None and skipall is None:
1185 chunk.pretty(ui)
1185 chunk.pretty(ui)
1186 if total == 1:
1186 if total == 1:
1187 msg = messages['single'][operation] % chunk.filename()
1187 msg = messages['single'][operation] % chunk.filename()
1188 else:
1188 else:
1189 idx = pos - len(h.hunks) + i
1189 idx = pos - len(h.hunks) + i
1190 msg = messages['multiple'][operation] % (idx, total,
1190 msg = messages['multiple'][operation] % (idx, total,
1191 chunk.filename())
1191 chunk.filename())
1192 r, skipfile, skipall, newpatches = prompt(skipfile,
1192 r, skipfile, skipall, newpatches = prompt(skipfile,
1193 skipall, msg, chunk)
1193 skipall, msg, chunk)
1194 if r:
1194 if r:
1195 if fixoffset:
1195 if fixoffset:
1196 chunk = copy.copy(chunk)
1196 chunk = copy.copy(chunk)
1197 chunk.toline += fixoffset
1197 chunk.toline += fixoffset
1198 applied[chunk.filename()].append(chunk)
1198 applied[chunk.filename()].append(chunk)
1199 elif newpatches is not None:
1199 elif newpatches is not None:
1200 for newpatch in newpatches:
1200 for newpatch in newpatches:
1201 for newhunk in newpatch.hunks:
1201 for newhunk in newpatch.hunks:
1202 if fixoffset:
1202 if fixoffset:
1203 newhunk.toline += fixoffset
1203 newhunk.toline += fixoffset
1204 applied[newhunk.filename()].append(newhunk)
1204 applied[newhunk.filename()].append(newhunk)
1205 else:
1205 else:
1206 fixoffset += chunk.removed - chunk.added
1206 fixoffset += chunk.removed - chunk.added
1207 return (sum([h for h in applied.itervalues()
1207 return (sum([h for h in applied.itervalues()
1208 if h[0].special() or len(h) > 1], []), {})
1208 if h[0].special() or len(h) > 1], []), {})
1209 class hunk(object):
1209 class hunk(object):
1210 def __init__(self, desc, num, lr, context):
1210 def __init__(self, desc, num, lr, context):
1211 self.number = num
1211 self.number = num
1212 self.desc = desc
1212 self.desc = desc
1213 self.hunk = [desc]
1213 self.hunk = [desc]
1214 self.a = []
1214 self.a = []
1215 self.b = []
1215 self.b = []
1216 self.starta = self.lena = None
1216 self.starta = self.lena = None
1217 self.startb = self.lenb = None
1217 self.startb = self.lenb = None
1218 if lr is not None:
1218 if lr is not None:
1219 if context:
1219 if context:
1220 self.read_context_hunk(lr)
1220 self.read_context_hunk(lr)
1221 else:
1221 else:
1222 self.read_unified_hunk(lr)
1222 self.read_unified_hunk(lr)
1223
1223
1224 def getnormalized(self):
1224 def getnormalized(self):
1225 """Return a copy with line endings normalized to LF."""
1225 """Return a copy with line endings normalized to LF."""
1226
1226
1227 def normalize(lines):
1227 def normalize(lines):
1228 nlines = []
1228 nlines = []
1229 for line in lines:
1229 for line in lines:
1230 if line.endswith('\r\n'):
1230 if line.endswith('\r\n'):
1231 line = line[:-2] + '\n'
1231 line = line[:-2] + '\n'
1232 nlines.append(line)
1232 nlines.append(line)
1233 return nlines
1233 return nlines
1234
1234
1235 # Dummy object, it is rebuilt manually
1235 # Dummy object, it is rebuilt manually
1236 nh = hunk(self.desc, self.number, None, None)
1236 nh = hunk(self.desc, self.number, None, None)
1237 nh.number = self.number
1237 nh.number = self.number
1238 nh.desc = self.desc
1238 nh.desc = self.desc
1239 nh.hunk = self.hunk
1239 nh.hunk = self.hunk
1240 nh.a = normalize(self.a)
1240 nh.a = normalize(self.a)
1241 nh.b = normalize(self.b)
1241 nh.b = normalize(self.b)
1242 nh.starta = self.starta
1242 nh.starta = self.starta
1243 nh.startb = self.startb
1243 nh.startb = self.startb
1244 nh.lena = self.lena
1244 nh.lena = self.lena
1245 nh.lenb = self.lenb
1245 nh.lenb = self.lenb
1246 return nh
1246 return nh
1247
1247
1248 def read_unified_hunk(self, lr):
1248 def read_unified_hunk(self, lr):
1249 m = unidesc.match(self.desc)
1249 m = unidesc.match(self.desc)
1250 if not m:
1250 if not m:
1251 raise PatchError(_("bad hunk #%d") % self.number)
1251 raise PatchError(_("bad hunk #%d") % self.number)
1252 self.starta, self.lena, self.startb, self.lenb = m.groups()
1252 self.starta, self.lena, self.startb, self.lenb = m.groups()
1253 if self.lena is None:
1253 if self.lena is None:
1254 self.lena = 1
1254 self.lena = 1
1255 else:
1255 else:
1256 self.lena = int(self.lena)
1256 self.lena = int(self.lena)
1257 if self.lenb is None:
1257 if self.lenb is None:
1258 self.lenb = 1
1258 self.lenb = 1
1259 else:
1259 else:
1260 self.lenb = int(self.lenb)
1260 self.lenb = int(self.lenb)
1261 self.starta = int(self.starta)
1261 self.starta = int(self.starta)
1262 self.startb = int(self.startb)
1262 self.startb = int(self.startb)
1263 try:
1263 try:
1264 diffhelper.addlines(lr, self.hunk, self.lena, self.lenb,
1264 diffhelper.addlines(lr, self.hunk, self.lena, self.lenb,
1265 self.a, self.b)
1265 self.a, self.b)
1266 except error.ParseError as e:
1266 except error.ParseError as e:
1267 raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
1267 raise PatchError(_("bad hunk #%d: %s") % (self.number, e))
1268 # if we hit eof before finishing out the hunk, the last line will
1268 # if we hit eof before finishing out the hunk, the last line will
1269 # be zero length. Lets try to fix it up.
1269 # be zero length. Lets try to fix it up.
1270 while len(self.hunk[-1]) == 0:
1270 while len(self.hunk[-1]) == 0:
1271 del self.hunk[-1]
1271 del self.hunk[-1]
1272 del self.a[-1]
1272 del self.a[-1]
1273 del self.b[-1]
1273 del self.b[-1]
1274 self.lena -= 1
1274 self.lena -= 1
1275 self.lenb -= 1
1275 self.lenb -= 1
1276 self._fixnewline(lr)
1276 self._fixnewline(lr)
1277
1277
1278 def read_context_hunk(self, lr):
1278 def read_context_hunk(self, lr):
1279 self.desc = lr.readline()
1279 self.desc = lr.readline()
1280 m = contextdesc.match(self.desc)
1280 m = contextdesc.match(self.desc)
1281 if not m:
1281 if not m:
1282 raise PatchError(_("bad hunk #%d") % self.number)
1282 raise PatchError(_("bad hunk #%d") % self.number)
1283 self.starta, aend = m.groups()
1283 self.starta, aend = m.groups()
1284 self.starta = int(self.starta)
1284 self.starta = int(self.starta)
1285 if aend is None:
1285 if aend is None:
1286 aend = self.starta
1286 aend = self.starta
1287 self.lena = int(aend) - self.starta
1287 self.lena = int(aend) - self.starta
1288 if self.starta:
1288 if self.starta:
1289 self.lena += 1
1289 self.lena += 1
1290 for x in pycompat.xrange(self.lena):
1290 for x in pycompat.xrange(self.lena):
1291 l = lr.readline()
1291 l = lr.readline()
1292 if l.startswith('---'):
1292 if l.startswith('---'):
1293 # lines addition, old block is empty
1293 # lines addition, old block is empty
1294 lr.push(l)
1294 lr.push(l)
1295 break
1295 break
1296 s = l[2:]
1296 s = l[2:]
1297 if l.startswith('- ') or l.startswith('! '):
1297 if l.startswith('- ') or l.startswith('! '):
1298 u = '-' + s
1298 u = '-' + s
1299 elif l.startswith(' '):
1299 elif l.startswith(' '):
1300 u = ' ' + s
1300 u = ' ' + s
1301 else:
1301 else:
1302 raise PatchError(_("bad hunk #%d old text line %d") %
1302 raise PatchError(_("bad hunk #%d old text line %d") %
1303 (self.number, x))
1303 (self.number, x))
1304 self.a.append(u)
1304 self.a.append(u)
1305 self.hunk.append(u)
1305 self.hunk.append(u)
1306
1306
1307 l = lr.readline()
1307 l = lr.readline()
1308 if l.startswith('\ '):
1308 if l.startswith('\ '):
1309 s = self.a[-1][:-1]
1309 s = self.a[-1][:-1]
1310 self.a[-1] = s
1310 self.a[-1] = s
1311 self.hunk[-1] = s
1311 self.hunk[-1] = s
1312 l = lr.readline()
1312 l = lr.readline()
1313 m = contextdesc.match(l)
1313 m = contextdesc.match(l)
1314 if not m:
1314 if not m:
1315 raise PatchError(_("bad hunk #%d") % self.number)
1315 raise PatchError(_("bad hunk #%d") % self.number)
1316 self.startb, bend = m.groups()
1316 self.startb, bend = m.groups()
1317 self.startb = int(self.startb)
1317 self.startb = int(self.startb)
1318 if bend is None:
1318 if bend is None:
1319 bend = self.startb
1319 bend = self.startb
1320 self.lenb = int(bend) - self.startb
1320 self.lenb = int(bend) - self.startb
1321 if self.startb:
1321 if self.startb:
1322 self.lenb += 1
1322 self.lenb += 1
1323 hunki = 1
1323 hunki = 1
1324 for x in pycompat.xrange(self.lenb):
1324 for x in pycompat.xrange(self.lenb):
1325 l = lr.readline()
1325 l = lr.readline()
1326 if l.startswith('\ '):
1326 if l.startswith('\ '):
1327 # XXX: the only way to hit this is with an invalid line range.
1327 # XXX: the only way to hit this is with an invalid line range.
1328 # The no-eol marker is not counted in the line range, but I
1328 # The no-eol marker is not counted in the line range, but I
1329 # guess there are diff(1) out there which behave differently.
1329 # guess there are diff(1) out there which behave differently.
1330 s = self.b[-1][:-1]
1330 s = self.b[-1][:-1]
1331 self.b[-1] = s
1331 self.b[-1] = s
1332 self.hunk[hunki - 1] = s
1332 self.hunk[hunki - 1] = s
1333 continue
1333 continue
1334 if not l:
1334 if not l:
1335 # line deletions, new block is empty and we hit EOF
1335 # line deletions, new block is empty and we hit EOF
1336 lr.push(l)
1336 lr.push(l)
1337 break
1337 break
1338 s = l[2:]
1338 s = l[2:]
1339 if l.startswith('+ ') or l.startswith('! '):
1339 if l.startswith('+ ') or l.startswith('! '):
1340 u = '+' + s
1340 u = '+' + s
1341 elif l.startswith(' '):
1341 elif l.startswith(' '):
1342 u = ' ' + s
1342 u = ' ' + s
1343 elif len(self.b) == 0:
1343 elif len(self.b) == 0:
1344 # line deletions, new block is empty
1344 # line deletions, new block is empty
1345 lr.push(l)
1345 lr.push(l)
1346 break
1346 break
1347 else:
1347 else:
1348 raise PatchError(_("bad hunk #%d old text line %d") %
1348 raise PatchError(_("bad hunk #%d old text line %d") %
1349 (self.number, x))
1349 (self.number, x))
1350 self.b.append(s)
1350 self.b.append(s)
1351 while True:
1351 while True:
1352 if hunki >= len(self.hunk):
1352 if hunki >= len(self.hunk):
1353 h = ""
1353 h = ""
1354 else:
1354 else:
1355 h = self.hunk[hunki]
1355 h = self.hunk[hunki]
1356 hunki += 1
1356 hunki += 1
1357 if h == u:
1357 if h == u:
1358 break
1358 break
1359 elif h.startswith('-'):
1359 elif h.startswith('-'):
1360 continue
1360 continue
1361 else:
1361 else:
1362 self.hunk.insert(hunki - 1, u)
1362 self.hunk.insert(hunki - 1, u)
1363 break
1363 break
1364
1364
1365 if not self.a:
1365 if not self.a:
1366 # this happens when lines were only added to the hunk
1366 # this happens when lines were only added to the hunk
1367 for x in self.hunk:
1367 for x in self.hunk:
1368 if x.startswith('-') or x.startswith(' '):
1368 if x.startswith('-') or x.startswith(' '):
1369 self.a.append(x)
1369 self.a.append(x)
1370 if not self.b:
1370 if not self.b:
1371 # this happens when lines were only deleted from the hunk
1371 # this happens when lines were only deleted from the hunk
1372 for x in self.hunk:
1372 for x in self.hunk:
1373 if x.startswith('+') or x.startswith(' '):
1373 if x.startswith('+') or x.startswith(' '):
1374 self.b.append(x[1:])
1374 self.b.append(x[1:])
1375 # @@ -start,len +start,len @@
1375 # @@ -start,len +start,len @@
1376 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
1376 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
1377 self.startb, self.lenb)
1377 self.startb, self.lenb)
1378 self.hunk[0] = self.desc
1378 self.hunk[0] = self.desc
1379 self._fixnewline(lr)
1379 self._fixnewline(lr)
1380
1380
1381 def _fixnewline(self, lr):
1381 def _fixnewline(self, lr):
1382 l = lr.readline()
1382 l = lr.readline()
1383 if l.startswith('\ '):
1383 if l.startswith('\ '):
1384 diffhelper.fixnewline(self.hunk, self.a, self.b)
1384 diffhelper.fixnewline(self.hunk, self.a, self.b)
1385 else:
1385 else:
1386 lr.push(l)
1386 lr.push(l)
1387
1387
1388 def complete(self):
1388 def complete(self):
1389 return len(self.a) == self.lena and len(self.b) == self.lenb
1389 return len(self.a) == self.lena and len(self.b) == self.lenb
1390
1390
1391 def _fuzzit(self, old, new, fuzz, toponly):
1391 def _fuzzit(self, old, new, fuzz, toponly):
1392 # this removes context lines from the top and bottom of list 'l'. It
1392 # this removes context lines from the top and bottom of list 'l'. It
1393 # checks the hunk to make sure only context lines are removed, and then
1393 # checks the hunk to make sure only context lines are removed, and then
1394 # returns a new shortened list of lines.
1394 # returns a new shortened list of lines.
1395 fuzz = min(fuzz, len(old))
1395 fuzz = min(fuzz, len(old))
1396 if fuzz:
1396 if fuzz:
1397 top = 0
1397 top = 0
1398 bot = 0
1398 bot = 0
1399 hlen = len(self.hunk)
1399 hlen = len(self.hunk)
1400 for x in pycompat.xrange(hlen - 1):
1400 for x in pycompat.xrange(hlen - 1):
1401 # the hunk starts with the @@ line, so use x+1
1401 # the hunk starts with the @@ line, so use x+1
1402 if self.hunk[x + 1].startswith(' '):
1402 if self.hunk[x + 1].startswith(' '):
1403 top += 1
1403 top += 1
1404 else:
1404 else:
1405 break
1405 break
1406 if not toponly:
1406 if not toponly:
1407 for x in pycompat.xrange(hlen - 1):
1407 for x in pycompat.xrange(hlen - 1):
1408 if self.hunk[hlen - bot - 1].startswith(' '):
1408 if self.hunk[hlen - bot - 1].startswith(' '):
1409 bot += 1
1409 bot += 1
1410 else:
1410 else:
1411 break
1411 break
1412
1412
1413 bot = min(fuzz, bot)
1413 bot = min(fuzz, bot)
1414 top = min(fuzz, top)
1414 top = min(fuzz, top)
1415 return old[top:len(old) - bot], new[top:len(new) - bot], top
1415 return old[top:len(old) - bot], new[top:len(new) - bot], top
1416 return old, new, 0
1416 return old, new, 0
1417
1417
1418 def fuzzit(self, fuzz, toponly):
1418 def fuzzit(self, fuzz, toponly):
1419 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1419 old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
1420 oldstart = self.starta + top
1420 oldstart = self.starta + top
1421 newstart = self.startb + top
1421 newstart = self.startb + top
1422 # zero length hunk ranges already have their start decremented
1422 # zero length hunk ranges already have their start decremented
1423 if self.lena and oldstart > 0:
1423 if self.lena and oldstart > 0:
1424 oldstart -= 1
1424 oldstart -= 1
1425 if self.lenb and newstart > 0:
1425 if self.lenb and newstart > 0:
1426 newstart -= 1
1426 newstart -= 1
1427 return old, oldstart, new, newstart
1427 return old, oldstart, new, newstart
1428
1428
1429 class binhunk(object):
1429 class binhunk(object):
1430 'A binary patch file.'
1430 'A binary patch file.'
1431 def __init__(self, lr, fname):
1431 def __init__(self, lr, fname):
1432 self.text = None
1432 self.text = None
1433 self.delta = False
1433 self.delta = False
1434 self.hunk = ['GIT binary patch\n']
1434 self.hunk = ['GIT binary patch\n']
1435 self._fname = fname
1435 self._fname = fname
1436 self._read(lr)
1436 self._read(lr)
1437
1437
1438 def complete(self):
1438 def complete(self):
1439 return self.text is not None
1439 return self.text is not None
1440
1440
1441 def new(self, lines):
1441 def new(self, lines):
1442 if self.delta:
1442 if self.delta:
1443 return [applybindelta(self.text, ''.join(lines))]
1443 return [applybindelta(self.text, ''.join(lines))]
1444 return [self.text]
1444 return [self.text]
1445
1445
1446 def _read(self, lr):
1446 def _read(self, lr):
1447 def getline(lr, hunk):
1447 def getline(lr, hunk):
1448 l = lr.readline()
1448 l = lr.readline()
1449 hunk.append(l)
1449 hunk.append(l)
1450 return l.rstrip('\r\n')
1450 return l.rstrip('\r\n')
1451
1451
1452 while True:
1452 while True:
1453 line = getline(lr, self.hunk)
1453 line = getline(lr, self.hunk)
1454 if not line:
1454 if not line:
1455 raise PatchError(_('could not extract "%s" binary data')
1455 raise PatchError(_('could not extract "%s" binary data')
1456 % self._fname)
1456 % self._fname)
1457 if line.startswith('literal '):
1457 if line.startswith('literal '):
1458 size = int(line[8:].rstrip())
1458 size = int(line[8:].rstrip())
1459 break
1459 break
1460 if line.startswith('delta '):
1460 if line.startswith('delta '):
1461 size = int(line[6:].rstrip())
1461 size = int(line[6:].rstrip())
1462 self.delta = True
1462 self.delta = True
1463 break
1463 break
1464 dec = []
1464 dec = []
1465 line = getline(lr, self.hunk)
1465 line = getline(lr, self.hunk)
1466 while len(line) > 1:
1466 while len(line) > 1:
1467 l = line[0:1]
1467 l = line[0:1]
1468 if l <= 'Z' and l >= 'A':
1468 if l <= 'Z' and l >= 'A':
1469 l = ord(l) - ord('A') + 1
1469 l = ord(l) - ord('A') + 1
1470 else:
1470 else:
1471 l = ord(l) - ord('a') + 27
1471 l = ord(l) - ord('a') + 27
1472 try:
1472 try:
1473 dec.append(util.b85decode(line[1:])[:l])
1473 dec.append(util.b85decode(line[1:])[:l])
1474 except ValueError as e:
1474 except ValueError as e:
1475 raise PatchError(_('could not decode "%s" binary patch: %s')
1475 raise PatchError(_('could not decode "%s" binary patch: %s')
1476 % (self._fname, stringutil.forcebytestr(e)))
1476 % (self._fname, stringutil.forcebytestr(e)))
1477 line = getline(lr, self.hunk)
1477 line = getline(lr, self.hunk)
1478 text = zlib.decompress(''.join(dec))
1478 text = zlib.decompress(''.join(dec))
1479 if len(text) != size:
1479 if len(text) != size:
1480 raise PatchError(_('"%s" length is %d bytes, should be %d')
1480 raise PatchError(_('"%s" length is %d bytes, should be %d')
1481 % (self._fname, len(text), size))
1481 % (self._fname, len(text), size))
1482 self.text = text
1482 self.text = text
1483
1483
1484 def parsefilename(str):
1484 def parsefilename(str):
1485 # --- filename \t|space stuff
1485 # --- filename \t|space stuff
1486 s = str[4:].rstrip('\r\n')
1486 s = str[4:].rstrip('\r\n')
1487 i = s.find('\t')
1487 i = s.find('\t')
1488 if i < 0:
1488 if i < 0:
1489 i = s.find(' ')
1489 i = s.find(' ')
1490 if i < 0:
1490 if i < 0:
1491 return s
1491 return s
1492 return s[:i]
1492 return s[:i]
1493
1493
1494 def reversehunks(hunks):
1494 def reversehunks(hunks):
1495 '''reverse the signs in the hunks given as argument
1495 '''reverse the signs in the hunks given as argument
1496
1496
1497 This function operates on hunks coming out of patch.filterpatch, that is
1497 This function operates on hunks coming out of patch.filterpatch, that is
1498 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1498 a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:
1499
1499
1500 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1500 >>> rawpatch = b"""diff --git a/folder1/g b/folder1/g
1501 ... --- a/folder1/g
1501 ... --- a/folder1/g
1502 ... +++ b/folder1/g
1502 ... +++ b/folder1/g
1503 ... @@ -1,7 +1,7 @@
1503 ... @@ -1,7 +1,7 @@
1504 ... +firstline
1504 ... +firstline
1505 ... c
1505 ... c
1506 ... 1
1506 ... 1
1507 ... 2
1507 ... 2
1508 ... + 3
1508 ... + 3
1509 ... -4
1509 ... -4
1510 ... 5
1510 ... 5
1511 ... d
1511 ... d
1512 ... +lastline"""
1512 ... +lastline"""
1513 >>> hunks = parsepatch([rawpatch])
1513 >>> hunks = parsepatch([rawpatch])
1514 >>> hunkscomingfromfilterpatch = []
1514 >>> hunkscomingfromfilterpatch = []
1515 >>> for h in hunks:
1515 >>> for h in hunks:
1516 ... hunkscomingfromfilterpatch.append(h)
1516 ... hunkscomingfromfilterpatch.append(h)
1517 ... hunkscomingfromfilterpatch.extend(h.hunks)
1517 ... hunkscomingfromfilterpatch.extend(h.hunks)
1518
1518
1519 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1519 >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch)
1520 >>> from . import util
1520 >>> from . import util
1521 >>> fp = util.stringio()
1521 >>> fp = util.stringio()
1522 >>> for c in reversedhunks:
1522 >>> for c in reversedhunks:
1523 ... c.write(fp)
1523 ... c.write(fp)
1524 >>> fp.seek(0) or None
1524 >>> fp.seek(0) or None
1525 >>> reversedpatch = fp.read()
1525 >>> reversedpatch = fp.read()
1526 >>> print(pycompat.sysstr(reversedpatch))
1526 >>> print(pycompat.sysstr(reversedpatch))
1527 diff --git a/folder1/g b/folder1/g
1527 diff --git a/folder1/g b/folder1/g
1528 --- a/folder1/g
1528 --- a/folder1/g
1529 +++ b/folder1/g
1529 +++ b/folder1/g
1530 @@ -1,4 +1,3 @@
1530 @@ -1,4 +1,3 @@
1531 -firstline
1531 -firstline
1532 c
1532 c
1533 1
1533 1
1534 2
1534 2
1535 @@ -2,6 +1,6 @@
1535 @@ -2,6 +1,6 @@
1536 c
1536 c
1537 1
1537 1
1538 2
1538 2
1539 - 3
1539 - 3
1540 +4
1540 +4
1541 5
1541 5
1542 d
1542 d
1543 @@ -6,3 +5,2 @@
1543 @@ -6,3 +5,2 @@
1544 5
1544 5
1545 d
1545 d
1546 -lastline
1546 -lastline
1547
1547
1548 '''
1548 '''
1549
1549
1550 newhunks = []
1550 newhunks = []
1551 for c in hunks:
1551 for c in hunks:
1552 if util.safehasattr(c, 'reversehunk'):
1552 if util.safehasattr(c, 'reversehunk'):
1553 c = c.reversehunk()
1553 c = c.reversehunk()
1554 newhunks.append(c)
1554 newhunks.append(c)
1555 return newhunks
1555 return newhunks
1556
1556
def parsepatch(originalchunks, maxcontext=None):
    """patch -> [] of headers -> [] of hunks

    If maxcontext is not None, trim context lines if necessary.

    >>> rawpatch = b'''diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,8 +1,10 @@
    ... 1
    ... 2
    ... -3
    ... 4
    ... 5
    ... 6
    ... +6.1
    ... +6.2
    ... 7
    ... 8
    ... +9'''
    >>> out = util.stringio()
    >>> headers = parsepatch([rawpatch], maxcontext=1)
    >>> for header in headers:
    ...     header.write(out)
    ...     for hunk in header.hunks:
    ...         hunk.write(out)
    >>> print(pycompat.sysstr(out.getvalue()))
    diff --git a/folder1/g b/folder1/g
    --- a/folder1/g
    +++ b/folder1/g
    @@ -2,3 +2,2 @@
     2
    -3
     4
    @@ -6,2 +5,4 @@
     6
    +6.1
    +6.2
     7
    @@ -8,1 +9,2 @@
     8
    +9
    """
    # Events produced by scanpatch() drive this small state machine; the
    # parser accumulates context/hunk lines and flushes them into recordhunk
    # objects attached to the current header.
    class parser(object):
        """patch parsing state machine"""
        def __init__(self):
            # fromline/toline track the current position in the old/new file
            self.fromline = 0
            self.toline = 0
            # processed function name from the @@ ... @@ line, if any
            self.proc = ''
            self.header = None
            self.context = []
            self.before = []
            self.hunk = []
            self.headers = []

        def addrange(self, limits):
            # a new @@ range starts a new hunk; flush any pending one first
            self.addcontext([])
            fromstart, fromend, tostart, toend, proc = limits
            self.fromline = int(fromstart)
            self.toline = int(tostart)
            self.proc = proc

        def addcontext(self, context):
            # flush the pending hunk (if any) into a recordhunk, using
            # 'context' as its trailing context, then remember 'context'
            # as potential leading context for the next hunk
            if self.hunk:
                h = recordhunk(self.header, self.fromline, self.toline,
                        self.proc, self.before, self.hunk, context, maxcontext)
                self.header.hunks.append(h)
                self.fromline += len(self.before) + h.removed
                self.toline += len(self.before) + h.added
                self.before = []
                self.hunk = []
            self.context = context

        def addhunk(self, hunk):
            # context seen just before a hunk becomes its leading context
            if self.context:
                self.before = self.context
                self.context = []
            if self.hunk:
                self.addcontext([])
            self.hunk = hunk

        def newfile(self, hdr):
            self.addcontext([])
            h = header(hdr)
            self.headers.append(h)
            self.header = h

        def addother(self, line):
            pass # 'other' lines are ignored

        def finished(self):
            # flush the last pending hunk before returning all headers
            self.addcontext([])
            return self.headers

        # current state -> {event: handler}; handlers are the plain
        # functions above and are invoked with the parser instance passed
        # explicitly (see the driver loop below).  Missing entries mean the
        # transition is illegal for well-formed patches.
        transitions = {
            'file': {'context': addcontext,
                     'file': newfile,
                     'hunk': addhunk,
                     'range': addrange},
            'context': {'file': newfile,
                        'hunk': addhunk,
                        'range': addrange,
                        'other': addother},
            'hunk': {'context': addcontext,
                     'file': newfile,
                     'range': addrange},
            'range': {'context': addcontext,
                      'hunk': addhunk},
            'other': {'other': addother},
        }

    p = parser()
    # concatenate all chunks into one seekable buffer for scanpatch()
    fp = stringio()
    fp.write(''.join(originalchunks))
    fp.seek(0)

    state = 'context'
    for newstate, data in scanpatch(fp):
        try:
            p.transitions[state][newstate](p, data)
        except KeyError:
            raise PatchError('unhandled transition: %s -> %s' %
                             (state, newstate))
        state = newstate
    del fp
    return p.finished()
1683
1683
def pathtransform(path, strip, prefix):
    '''turn a path from a patch into a path suitable for the repository

    prefix, if not empty, is expected to be normalized with a / at the end.

    Returns (stripped components, path in repository).

    >>> pathtransform(b'a/b/c', 0, b'')
    ('', 'a/b/c')
    >>> pathtransform(b' a/b/c ', 0, b'')
    ('', ' a/b/c')
    >>> pathtransform(b' a/b/c ', 2, b'')
    ('a/b/', 'c')
    >>> pathtransform(b'a/b/c', 0, b'd/e/')
    ('', 'd/e/a/b/c')
    >>> pathtransform(b' a//b/c ', 2, b'd/e/')
    ('a//b/', 'd/e/c')
    >>> pathtransform(b'a/b/c', 3, b'')
    Traceback (most recent call last):
    PatchError: unable to strip away 1 of 3 dirs from a/b/c
    '''
    # strip == 0 keeps the whole path; only trailing whitespace is dropped
    if strip == 0:
        return '', prefix + path.rstrip()
    end = len(path)
    pos = 0
    remaining = strip
    while remaining:
        pos = path.find('/', pos)
        if pos == -1:
            raise PatchError(_("unable to strip away %d of %d dirs from %s")
                             % (remaining, strip, path))
        pos += 1
        # a run of slashes ('a//b') counts as a single component separator
        while pos < end - 1 and path[pos:pos + 1] == '/':
            pos += 1
        remaining -= 1
    return path[:pos].lstrip(), prefix + path[pos:].rstrip()
1721
1721
def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix):
    """Build a patchmeta for a plain (non-git) patch hunk.

    Decides which of the '---'/'+++' paths the hunk actually targets,
    based on which files exist in the backend and whether the hunk is a
    creation or a deletion, and returns a patchmeta with op set to
    'ADD'/'DELETE' when appropriate.  Raises PatchError when neither a
    source nor a destination file can be determined.
    """
    isnulla = afile_orig == "/dev/null"
    isnullb = bfile_orig == "/dev/null"
    create = isnulla and hunk.starta == 0 and hunk.lena == 0
    remove = isnullb and hunk.startb == 0 and hunk.lenb == 0
    abase, afile = pathtransform(afile_orig, strip, prefix)
    haveafile = not isnulla and backend.exists(afile)
    bbase, bfile = pathtransform(bfile_orig, strip, prefix)
    if afile == bfile:
        havebfile = haveafile
    else:
        havebfile = not isnullb and backend.exists(bfile)
    missing = not havebfile and not haveafile and not create

    # some diff programs apparently produce patches where the afile is
    # not /dev/null, but afile starts with bfile
    abasedir = afile[:afile.rfind('/') + 1]
    bbasedir = bfile[:bfile.rfind('/') + 1]
    if (missing and abasedir == bbasedir and afile.startswith(bfile)
        and hunk.starta == 0 and hunk.lena == 0):
        create = True
        missing = False

    # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
    # diff is between a file and its backup. In this case, the original
    # file should be patched (see original mpatch code).
    isbackup = (abase == bbase and bfile.startswith(afile))
    fname = None
    if not missing:
        if haveafile and havebfile:
            fname = afile if isbackup else bfile
        elif haveafile:
            fname = afile

    if not fname:
        if not isnullb:
            fname = afile if isbackup else bfile
        elif not isnulla:
            fname = afile
        else:
            raise PatchError(_("undefined source and destination files"))

    gp = patchmeta(fname)
    if create:
        gp.op = 'ADD'
    elif remove:
        gp.op = 'DELETE'
    return gp
1776
1776
def scanpatch(fp):
    """like patch.iterhunks, but yield different events

    - ('file', [header_lines + fromfile + tofile])
    - ('context', [context_lines])
    - ('hunk', [hunk_lines])
    - ('range', (-start,len, +start,len, proc))
    """
    # unified-diff hunk header: '@@ -start,len +start,len @@ optional-context'
    lines_re = re.compile(br'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
    lr = linereader(fp)

    def scanwhile(first, p):
        """scan lr while predicate holds"""
        # collect 'first' plus every following line satisfying p; the
        # first non-matching line is pushed back for the caller
        lines = [first]
        for line in iter(lr.readline, ''):
            if p(line):
                lines.append(line)
            else:
                lr.push(line)
                break
        return lines

    for line in iter(lr.readline, ''):
        if line.startswith('diff --git a/') or line.startswith('diff -r '):
            # file header: everything up to (and including) the
            # '---'/'+++' pair, when present
            def notheader(line):
                s = line.split(None, 1)
                return not s or s[0] not in ('---', 'diff')
            header = scanwhile(line, notheader)
            fromfile = lr.readline()
            if fromfile.startswith('---'):
                tofile = lr.readline()
                header += [fromfile, tofile]
            else:
                # no file lines (e.g. a git metadata-only diff)
                lr.push(fromfile)
            yield 'file', header
        elif line.startswith(' '):
            # context lines; '\' covers '\ No newline at end of file'
            cs = (' ', '\\')
            yield 'context', scanwhile(line, lambda l: l.startswith(cs))
        elif line.startswith(('-', '+')):
            # changed lines
            cs = ('-', '+', '\\')
            yield 'hunk', scanwhile(line, lambda l: l.startswith(cs))
        else:
            m = lines_re.match(line)
            if m:
                yield 'range', m.groups()
            else:
                yield 'other', line
1824
1824
def scangitpatch(lr, firstline):
    """Read ahead through a whole git patch for copy/rename metadata.

    Git patches can emit:
    - rename a to b
    - change b
    - copy a to c
    - change c

    That sequence cannot be applied as-is: by the time the change to 'b'
    is seen, 'a' has already been renamed away, and copying from 'b'
    would pick up its changes.  So the full patch is scanned up front for
    copy and rename commands, allowing the copies to be performed ahead
    of time, and the input is then rewound to where scanning started.
    """
    pos = 0
    try:
        # seekable input: remember the current offset and scan in place
        fp = lr.fp
        pos = fp.tell()
    except IOError:
        # unseekable input (e.g. a pipe): buffer it all so we can rewind
        fp = stringio(lr.fp.read())
    scanner = linereader(fp)
    scanner.push(firstline)
    patches = readgitpatch(scanner)
    fp.seek(pos)
    return patches
1850
1850
def iterhunks(fp):
    """Read a patch and yield the following events:
    - ("file", afile, bfile, firsthunk): select a new target file.
    - ("hunk", hunk): a new hunk is ready to be applied, follows a
    "file" event.
    - ("git", gitchanges): current diff is in git format, gitchanges
    maps filenames to gitpatch records. Unique event.
    """
    afile = ""
    bfile = ""
    state = None
    hunknum = 0
    emitfile = newfile = False
    # stack of pending gitpatch records (None until a git diff is seen)
    gitpatches = None

    # our states
    BFILE = 1
    # tri-state: None = format unknown yet, False = unified diff,
    # True = context diff
    context = None
    lr = linereader(fp)

    for x in iter(lr.readline, ''):
        if state == BFILE and (
            (not context and x.startswith('@'))
            or (context is not False and x.startswith('***************'))
            or x.startswith('GIT binary patch')):
            # a hunk begins for the currently-selected file
            gp = None
            if (gitpatches and
                gitpatches[-1].ispatching(afile, bfile)):
                gp = gitpatches.pop()
            if x.startswith('GIT binary patch'):
                h = binhunk(lr, gp.path)
            else:
                if context is None and x.startswith('***************'):
                    context = True
                h = hunk(x, hunknum + 1, lr, context)
            hunknum += 1
            if emitfile:
                # the 'file' event is deferred until its first hunk so the
                # hunk object can accompany it
                emitfile = False
                yield 'file', (afile, bfile, h, gp and gp.copy() or None)
            yield 'hunk', h
        elif x.startswith('diff --git a/'):
            m = gitre.match(x.rstrip(' \r\n'))
            if not m:
                continue
            if gitpatches is None:
                # scan whole input for git metadata
                gitpatches = scangitpatch(lr, x)
                yield 'git', [g.copy() for g in gitpatches
                              if g.op in ('COPY', 'RENAME')]
                # reversed so records can be popped in document order
                gitpatches.reverse()
            afile = 'a/' + m.group(1)
            bfile = 'b/' + m.group(2)
            # flush metadata-only entries preceding the current file
            while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
                gp = gitpatches.pop()
                yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
            if not gitpatches:
                raise PatchError(_('failed to synchronize metadata for "%s"')
                                 % afile[2:])
            newfile = True
        elif x.startswith('---'):
            # check for a unified diff
            l2 = lr.readline()
            if not l2.startswith('+++'):
                lr.push(l2)
                continue
            newfile = True
            context = False
            afile = parsefilename(x)
            bfile = parsefilename(l2)
        elif x.startswith('***'):
            # check for a context diff
            l2 = lr.readline()
            if not l2.startswith('---'):
                lr.push(l2)
                continue
            l3 = lr.readline()
            lr.push(l3)
            if not l3.startswith("***************"):
                lr.push(l2)
                continue
            newfile = True
            context = True
            afile = parsefilename(x)
            bfile = parsefilename(l2)

        if newfile:
            # arm the deferred 'file' event and reset per-file counters
            newfile = False
            emitfile = True
            state = BFILE
            hunknum = 0

    # emit remaining git metadata entries that had no hunks of their own
    while gitpatches:
        gp = gitpatches.pop()
        yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
1945
1945
def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c

    binchunk is the delta payload (two size headers followed by a stream
    of copy/literal instructions); data is the source the copy
    instructions read from.  Returns the reconstructed content.  Raises
    PatchError on a zero opcode, which git defines as reserved/invalid.
    """
    def deltahead(binchunk):
        # A delta size header is a little-endian base-128 varint: the high
        # bit of every byte except the last is set.  Return the number of
        # bytes it occupies (byte-at-a-time slicing matches the access
        # pattern used for the instruction stream below).
        i = 0
        while i < len(binchunk):
            i += 1
            if not (ord(binchunk[i - 1:i]) & 0x80):
                return i
        return i
    # skip the source-size and destination-size headers
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    # collect output fragments and join once at the end: repeated
    # concatenation onto an immutable string/bytes is quadratic
    out = []
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i:i + 1])
        i += 1
        if (cmd & 0x80):
            # copy instruction: bits 0-3 select which offset bytes follow,
            # bits 4-6 which size bytes, all little-endian
            offset = 0
            size = 0
            if (cmd & 0x01):
                offset = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x02):
                offset |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x04):
                offset |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if (cmd & 0x08):
                offset |= ord(binchunk[i:i + 1]) << 24
                i += 1
            if (cmd & 0x10):
                size = ord(binchunk[i:i + 1])
                i += 1
            if (cmd & 0x20):
                size |= ord(binchunk[i:i + 1]) << 8
                i += 1
            if (cmd & 0x40):
                size |= ord(binchunk[i:i + 1]) << 16
                i += 1
            if size == 0:
                # git's encoding: an all-zero size means 0x10000
                size = 0x10000
            offset_end = offset + size
            out.append(data[offset:offset_end])
        elif cmd != 0:
            # literal instruction: the next cmd (1..127) bytes are copied
            # verbatim from the delta stream
            offset_end = i + cmd
            out.append(binchunk[i:offset_end])
            i += cmd
        else:
            raise PatchError(_('unexpected delta opcode 0'))
    return ''.join(out)
2001
2001
def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Read a patch from fp and attempt to apply it.

    The return value is 0 when the patch applied cleanly, 1 when it
    applied with some fuzz, and -1 when any rejects were found.

    With eolmode='strict' the patch content and the patched files are
    handled in binary mode; any other eolmode ignores line endings while
    patching and then normalizes them according to that mode.
    """
    ret = _applydiff(ui, fp, patchfile, backend, store, strip=strip,
                     prefix=prefix, eolmode=eolmode)
    return ret
2014
2014
2015 def _canonprefix(repo, prefix):
2015 def _canonprefix(repo, prefix):
2016 if prefix:
2016 if prefix:
2017 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
2017 prefix = pathutil.canonpath(repo.root, repo.getcwd(), prefix)
2018 if prefix != '':
2018 if prefix != '':
2019 prefix += '/'
2019 prefix += '/'
2020 return prefix
2020 return prefix
2021
2021
def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='',
               eolmode='strict'):
    # Drive iterhunks() events against 'backend', using 'patcher' to build
    # per-file patch appliers.  Returns -1 if any rejects were recorded,
    # 1 if any hunk applied with fuzz, 0 on a clean apply.
    prefix = _canonprefix(backend.repo, prefix)
    def pstrip(p):
        # git paths carry an extra 'a/'/'b/' component, hence strip - 1
        return pathtransform(p, strip - 1, prefix)[1]

    rejects = 0
    err = 0
    current_file = None

    for state, values in iterhunks(fp):
        if state == 'hunk':
            if not current_file:
                # file selection failed earlier; skip its hunks
                continue
            ret = current_file.apply(values)
            if ret > 0:
                err = 1
        elif state == 'file':
            # finish the previous file before switching targets
            if current_file:
                rejects += current_file.close()
                current_file = None
            afile, bfile, first_hunk, gp = values
            if gp:
                gp.path = pstrip(gp.path)
                if gp.oldpath:
                    gp.oldpath = pstrip(gp.oldpath)
            else:
                # non-git patch: derive the metadata from the file lines
                gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
                                   prefix)
            if gp.op == 'RENAME':
                backend.unlink(gp.oldpath)
            if not first_hunk:
                # metadata-only entry (git rename/copy/mode change/delete)
                if gp.op == 'DELETE':
                    backend.unlink(gp.path)
                    continue
                data, mode = None, None
                if gp.op in ('RENAME', 'COPY'):
                    data, mode = store.getfile(gp.oldpath)[:2]
                    if data is None:
                        # This means that the old path does not exist
                        raise PatchError(_("source file '%s' does not exist")
                                         % gp.oldpath)
                if gp.mode:
                    mode = gp.mode
                    if gp.op == 'ADD':
                        # Added files without content have no hunk and
                        # must be created
                        data = ''
                if data or mode:
                    if (gp.op in ('ADD', 'RENAME', 'COPY')
                        and backend.exists(gp.path)):
                        raise PatchError(_("cannot create %s: destination "
                                           "already exists") % gp.path)
                    backend.setfile(gp.path, data, mode, gp.oldpath)
                continue
            try:
                current_file = patcher(ui, gp, backend, store,
                                       eolmode=eolmode)
            except PatchError as inst:
                # a failed file selection is a reject, not a fatal error
                ui.warn(str(inst) + '\n')
                current_file = None
                rejects += 1
                continue
        elif state == 'git':
            # snapshot copy/rename sources before they get modified
            for gp in values:
                path = pstrip(gp.oldpath)
                data, mode = backend.getfile(path)
                if data is None:
                    # The error ignored here will trigger a getfile()
                    # error in a place more appropriate for error
                    # handling, and will not interrupt the patching
                    # process.
                    pass
                else:
                    store.setfile(path, data, mode)
        else:
            raise error.Abort(_('unsupported parser state: %s') % state)

    if current_file:
        rejects += current_file.close()

    if rejects:
        return -1
    return err
2106
2106
def _externalpatch(ui, repo, patcher, patchname, strip, files,
                   similarity):
    """use <patcher> to apply <patchname> to the working directory.
    returns whether patch was applied with fuzz factor."""

    fuzz = False
    args = []
    cwd = repo.root
    if cwd:
        args.append('-d %s' % procutil.shellquote(cwd))
    cmd = ('%s %s -p%d < %s'
           % (patcher, ' '.join(args), strip, procutil.shellquote(patchname)))
    ui.debug('Using external patch tool: %s\n' % cmd)
    fp = procutil.popen(cmd, 'rb')
    try:
        # parse the tool's stdout to collect touched files and detect
        # fuzz/failures; patterns follow GNU patch's output
        for line in util.iterfile(fp):
            line = line.rstrip()
            ui.note(line + '\n')
            if line.startswith('patching file '):
                pf = util.parsepatchoutput(line)
                printed_file = False
                files.add(pf)
            elif line.find('with fuzz') >= 0:
                # NOTE(review): 'pf'/'printed_file' are only bound by a
                # prior 'patching file' line — assumes the tool always
                # prints that first; confirm against GNU patch output
                fuzz = True
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
            elif line.find('saving rejects to file') >= 0:
                ui.warn(line + '\n')
            elif line.find('FAILED') >= 0:
                if not printed_file:
                    ui.warn(pf + '\n')
                    printed_file = True
                ui.warn(line + '\n')
    finally:
        # record touched files even when the tool failed partway
        if files:
            scmutil.marktouched(repo, files, similarity)
    code = fp.close()
    if code:
        raise PatchError(_("patch command failed: %s") %
                         procutil.explainexit(code))
    return fuzz
2150
2150
2151 def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
2151 def patchbackend(ui, backend, patchobj, strip, prefix, files=None,
2152 eolmode='strict'):
2152 eolmode='strict'):
2153 if files is None:
2153 if files is None:
2154 files = set()
2154 files = set()
2155 if eolmode is None:
2155 if eolmode is None:
2156 eolmode = ui.config('patch', 'eol')
2156 eolmode = ui.config('patch', 'eol')
2157 if eolmode.lower() not in eolmodes:
2157 if eolmode.lower() not in eolmodes:
2158 raise error.Abort(_('unsupported line endings type: %s') % eolmode)
2158 raise error.Abort(_('unsupported line endings type: %s') % eolmode)
2159 eolmode = eolmode.lower()
2159 eolmode = eolmode.lower()
2160
2160
2161 store = filestore()
2161 store = filestore()
2162 try:
2162 try:
2163 fp = open(patchobj, 'rb')
2163 fp = open(patchobj, 'rb')
2164 except TypeError:
2164 except TypeError:
2165 fp = patchobj
2165 fp = patchobj
2166 try:
2166 try:
2167 ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
2167 ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix,
2168 eolmode=eolmode)
2168 eolmode=eolmode)
2169 finally:
2169 finally:
2170 if fp != patchobj:
2170 if fp != patchobj:
2171 fp.close()
2171 fp.close()
2172 files.update(backend.close())
2172 files.update(backend.close())
2173 store.close()
2173 store.close()
2174 if ret < 0:
2174 if ret < 0:
2175 raise PatchError(_('patch failed to apply'))
2175 raise PatchError(_('patch failed to apply'))
2176 return ret > 0
2176 return ret > 0
2177
2177
2178 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
2178 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None,
2179 eolmode='strict', similarity=0):
2179 eolmode='strict', similarity=0):
2180 """use builtin patch to apply <patchobj> to the working directory.
2180 """use builtin patch to apply <patchobj> to the working directory.
2181 returns whether patch was applied with fuzz factor."""
2181 returns whether patch was applied with fuzz factor."""
2182 backend = workingbackend(ui, repo, similarity)
2182 backend = workingbackend(ui, repo, similarity)
2183 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2183 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2184
2184
2185 def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
2185 def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None,
2186 eolmode='strict'):
2186 eolmode='strict'):
2187 backend = repobackend(ui, repo, ctx, store)
2187 backend = repobackend(ui, repo, ctx, store)
2188 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2188 return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode)
2189
2189
2190 def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
2190 def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict',
2191 similarity=0):
2191 similarity=0):
2192 """Apply <patchname> to the working directory.
2192 """Apply <patchname> to the working directory.
2193
2193
2194 'eolmode' specifies how end of lines should be handled. It can be:
2194 'eolmode' specifies how end of lines should be handled. It can be:
2195 - 'strict': inputs are read in binary mode, EOLs are preserved
2195 - 'strict': inputs are read in binary mode, EOLs are preserved
2196 - 'crlf': EOLs are ignored when patching and reset to CRLF
2196 - 'crlf': EOLs are ignored when patching and reset to CRLF
2197 - 'lf': EOLs are ignored when patching and reset to LF
2197 - 'lf': EOLs are ignored when patching and reset to LF
2198 - None: get it from user settings, default to 'strict'
2198 - None: get it from user settings, default to 'strict'
2199 'eolmode' is ignored when using an external patcher program.
2199 'eolmode' is ignored when using an external patcher program.
2200
2200
2201 Returns whether patch was applied with fuzz factor.
2201 Returns whether patch was applied with fuzz factor.
2202 """
2202 """
2203 patcher = ui.config('ui', 'patch')
2203 patcher = ui.config('ui', 'patch')
2204 if files is None:
2204 if files is None:
2205 files = set()
2205 files = set()
2206 if patcher:
2206 if patcher:
2207 return _externalpatch(ui, repo, patcher, patchname, strip,
2207 return _externalpatch(ui, repo, patcher, patchname, strip,
2208 files, similarity)
2208 files, similarity)
2209 return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
2209 return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode,
2210 similarity)
2210 similarity)
2211
2211
2212 def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
2212 def changedfiles(ui, repo, patchpath, strip=1, prefix=''):
2213 backend = fsbackend(ui, repo.root)
2213 backend = fsbackend(ui, repo.root)
2214 prefix = _canonprefix(repo, prefix)
2214 prefix = _canonprefix(repo, prefix)
2215 with open(patchpath, 'rb') as fp:
2215 with open(patchpath, 'rb') as fp:
2216 changed = set()
2216 changed = set()
2217 for state, values in iterhunks(fp):
2217 for state, values in iterhunks(fp):
2218 if state == 'file':
2218 if state == 'file':
2219 afile, bfile, first_hunk, gp = values
2219 afile, bfile, first_hunk, gp = values
2220 if gp:
2220 if gp:
2221 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
2221 gp.path = pathtransform(gp.path, strip - 1, prefix)[1]
2222 if gp.oldpath:
2222 if gp.oldpath:
2223 gp.oldpath = pathtransform(gp.oldpath, strip - 1,
2223 gp.oldpath = pathtransform(gp.oldpath, strip - 1,
2224 prefix)[1]
2224 prefix)[1]
2225 else:
2225 else:
2226 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2226 gp = makepatchmeta(backend, afile, bfile, first_hunk, strip,
2227 prefix)
2227 prefix)
2228 changed.add(gp.path)
2228 changed.add(gp.path)
2229 if gp.op == 'RENAME':
2229 if gp.op == 'RENAME':
2230 changed.add(gp.oldpath)
2230 changed.add(gp.oldpath)
2231 elif state not in ('hunk', 'git'):
2231 elif state not in ('hunk', 'git'):
2232 raise error.Abort(_('unsupported parser state: %s') % state)
2232 raise error.Abort(_('unsupported parser state: %s') % state)
2233 return changed
2233 return changed
2234
2234
2235 class GitDiffRequired(Exception):
2235 class GitDiffRequired(Exception):
2236 pass
2236 pass
2237
2237
2238 diffopts = diffutil.diffallopts
2238 diffopts = diffutil.diffallopts
2239 diffallopts = diffutil.diffallopts
2239 diffallopts = diffutil.diffallopts
2240 difffeatureopts = diffutil.difffeatureopts
2240 difffeatureopts = diffutil.difffeatureopts
2241
2241
2242 def diff(repo, node1=None, node2=None, match=None, changes=None,
2242 def diff(repo, node1=None, node2=None, match=None, changes=None,
2243 opts=None, losedatafn=None, prefix='', relroot='', copy=None,
2243 opts=None, losedatafn=None, prefix='', relroot='', copy=None,
2244 hunksfilterfn=None):
2244 hunksfilterfn=None):
2245 '''yields diff of changes to files between two nodes, or node and
2245 '''yields diff of changes to files between two nodes, or node and
2246 working directory.
2246 working directory.
2247
2247
2248 if node1 is None, use first dirstate parent instead.
2248 if node1 is None, use first dirstate parent instead.
2249 if node2 is None, compare node1 with working directory.
2249 if node2 is None, compare node1 with working directory.
2250
2250
2251 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2251 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
2252 every time some change cannot be represented with the current
2252 every time some change cannot be represented with the current
2253 patch format. Return False to upgrade to git patch format, True to
2253 patch format. Return False to upgrade to git patch format, True to
2254 accept the loss or raise an exception to abort the diff. It is
2254 accept the loss or raise an exception to abort the diff. It is
2255 called with the name of current file being diffed as 'fn'. If set
2255 called with the name of current file being diffed as 'fn'. If set
2256 to None, patches will always be upgraded to git format when
2256 to None, patches will always be upgraded to git format when
2257 necessary.
2257 necessary.
2258
2258
2259 prefix is a filename prefix that is prepended to all filenames on
2259 prefix is a filename prefix that is prepended to all filenames on
2260 display (used for subrepos).
2260 display (used for subrepos).
2261
2261
2262 relroot, if not empty, must be normalized with a trailing /. Any match
2262 relroot, if not empty, must be normalized with a trailing /. Any match
2263 patterns that fall outside it will be ignored.
2263 patterns that fall outside it will be ignored.
2264
2264
2265 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2265 copy, if not empty, should contain mappings {dst@y: src@x} of copy
2266 information.
2266 information.
2267
2267
2268 hunksfilterfn, if not None, should be a function taking a filectx and
2268 hunksfilterfn, if not None, should be a function taking a filectx and
2269 hunks generator that may yield filtered hunks.
2269 hunks generator that may yield filtered hunks.
2270 '''
2270 '''
2271 for fctx1, fctx2, hdr, hunks in diffhunks(
2271 for fctx1, fctx2, hdr, hunks in diffhunks(
2272 repo, node1=node1, node2=node2,
2272 repo, node1=node1, node2=node2,
2273 match=match, changes=changes, opts=opts,
2273 match=match, changes=changes, opts=opts,
2274 losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
2274 losedatafn=losedatafn, prefix=prefix, relroot=relroot, copy=copy,
2275 ):
2275 ):
2276 if hunksfilterfn is not None:
2276 if hunksfilterfn is not None:
2277 # If the file has been removed, fctx2 is None; but this should
2277 # If the file has been removed, fctx2 is None; but this should
2278 # not occur here since we catch removed files early in
2278 # not occur here since we catch removed files early in
2279 # logcmdutil.getlinerangerevs() for 'hg log -L'.
2279 # logcmdutil.getlinerangerevs() for 'hg log -L'.
2280 assert fctx2 is not None, \
2280 assert fctx2 is not None, \
2281 'fctx2 unexpectly None in diff hunks filtering'
2281 'fctx2 unexpectly None in diff hunks filtering'
2282 hunks = hunksfilterfn(fctx2, hunks)
2282 hunks = hunksfilterfn(fctx2, hunks)
2283 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2283 text = ''.join(sum((list(hlines) for hrange, hlines in hunks), []))
2284 if hdr and (text or len(hdr) > 1):
2284 if hdr and (text or len(hdr) > 1):
2285 yield '\n'.join(hdr) + '\n'
2285 yield '\n'.join(hdr) + '\n'
2286 if text:
2286 if text:
2287 yield text
2287 yield text
2288
2288
2289 def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
2289 def diffhunks(repo, node1=None, node2=None, match=None, changes=None,
2290 opts=None, losedatafn=None, prefix='', relroot='', copy=None):
2290 opts=None, losedatafn=None, prefix='', relroot='', copy=None):
2291 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2291 """Yield diff of changes to files in the form of (`header`, `hunks`) tuples
2292 where `header` is a list of diff headers and `hunks` is an iterable of
2292 where `header` is a list of diff headers and `hunks` is an iterable of
2293 (`hunkrange`, `hunklines`) tuples.
2293 (`hunkrange`, `hunklines`) tuples.
2294
2294
2295 See diff() for the meaning of parameters.
2295 See diff() for the meaning of parameters.
2296 """
2296 """
2297
2297
2298 if opts is None:
2298 if opts is None:
2299 opts = mdiff.defaultopts
2299 opts = mdiff.defaultopts
2300
2300
2301 if not node1 and not node2:
2301 if not node1 and not node2:
2302 node1 = repo.dirstate.p1()
2302 node1 = repo.dirstate.p1()
2303
2303
2304 def lrugetfilectx():
2304 def lrugetfilectx():
2305 cache = {}
2305 cache = {}
2306 order = collections.deque()
2306 order = collections.deque()
2307 def getfilectx(f, ctx):
2307 def getfilectx(f, ctx):
2308 fctx = ctx.filectx(f, filelog=cache.get(f))
2308 fctx = ctx.filectx(f, filelog=cache.get(f))
2309 if f not in cache:
2309 if f not in cache:
2310 if len(cache) > 20:
2310 if len(cache) > 20:
2311 del cache[order.popleft()]
2311 del cache[order.popleft()]
2312 cache[f] = fctx.filelog()
2312 cache[f] = fctx.filelog()
2313 else:
2313 else:
2314 order.remove(f)
2314 order.remove(f)
2315 order.append(f)
2315 order.append(f)
2316 return fctx
2316 return fctx
2317 return getfilectx
2317 return getfilectx
2318 getfilectx = lrugetfilectx()
2318 getfilectx = lrugetfilectx()
2319
2319
2320 ctx1 = repo[node1]
2320 ctx1 = repo[node1]
2321 ctx2 = repo[node2]
2321 ctx2 = repo[node2]
2322
2322
2323 if relroot:
2323 if relroot:
2324 relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
2324 relrootmatch = scmutil.match(ctx2, pats=[relroot], default='path')
2325 match = matchmod.intersectmatchers(match, relrootmatch)
2325 match = matchmod.intersectmatchers(match, relrootmatch)
2326
2326
2327 if not changes:
2327 if not changes:
2328 changes = ctx1.status(ctx2, match=match)
2328 changes = ctx1.status(ctx2, match=match)
2329 modified, added, removed = changes[:3]
2329 modified, added, removed = changes[:3]
2330
2330
2331 if not modified and not added and not removed:
2331 if not modified and not added and not removed:
2332 return []
2332 return []
2333
2333
2334 if repo.ui.debugflag:
2334 if repo.ui.debugflag:
2335 hexfunc = hex
2335 hexfunc = hex
2336 else:
2336 else:
2337 hexfunc = short
2337 hexfunc = short
2338 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2338 revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node]
2339
2339
2340 if copy is None:
2340 if copy is None:
2341 copy = {}
2341 copy = {}
2342 if opts.git or opts.upgrade:
2342 if opts.git or opts.upgrade:
2343 copy = copies.pathcopies(ctx1, ctx2, match=match)
2343 copy = copies.pathcopies(ctx1, ctx2, match=match)
2344
2344
2345 if relroot:
2345 if relroot:
2346 # filter out copies where source side isn't inside the relative root
2346 # filter out copies where source side isn't inside the relative root
2347 # (copies.pathcopies() already filtered out the destination)
2347 # (copies.pathcopies() already filtered out the destination)
2348 copy = {dst: src for dst, src in copy.iteritems()
2348 copy = {dst: src for dst, src in copy.iteritems()
2349 if src.startswith(relroot)}
2349 if src.startswith(relroot)}
2350
2350
2351 modifiedset = set(modified)
2351 modifiedset = set(modified)
2352 addedset = set(added)
2352 addedset = set(added)
2353 removedset = set(removed)
2353 removedset = set(removed)
2354 for f in modified:
2354 for f in modified:
2355 if f not in ctx1:
2355 if f not in ctx1:
2356 # Fix up added, since merged-in additions appear as
2356 # Fix up added, since merged-in additions appear as
2357 # modifications during merges
2357 # modifications during merges
2358 modifiedset.remove(f)
2358 modifiedset.remove(f)
2359 addedset.add(f)
2359 addedset.add(f)
2360 for f in removed:
2360 for f in removed:
2361 if f not in ctx1:
2361 if f not in ctx1:
2362 # Merged-in additions that are then removed are reported as removed.
2362 # Merged-in additions that are then removed are reported as removed.
2363 # They are not in ctx1, so We don't want to show them in the diff.
2363 # They are not in ctx1, so We don't want to show them in the diff.
2364 removedset.remove(f)
2364 removedset.remove(f)
2365 modified = sorted(modifiedset)
2365 modified = sorted(modifiedset)
2366 added = sorted(addedset)
2366 added = sorted(addedset)
2367 removed = sorted(removedset)
2367 removed = sorted(removedset)
2368 for dst, src in list(copy.items()):
2368 for dst, src in list(copy.items()):
2369 if src not in ctx1:
2369 if src not in ctx1:
2370 # Files merged in during a merge and then copied/renamed are
2370 # Files merged in during a merge and then copied/renamed are
2371 # reported as copies. We want to show them in the diff as additions.
2371 # reported as copies. We want to show them in the diff as additions.
2372 del copy[dst]
2372 del copy[dst]
2373
2373
2374 prefetchmatch = scmutil.matchfiles(
2374 prefetchmatch = scmutil.matchfiles(
2375 repo, list(modifiedset | addedset | removedset))
2375 repo, list(modifiedset | addedset | removedset))
2376 scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)
2376 scmutil.prefetchfiles(repo, [ctx1.rev(), ctx2.rev()], prefetchmatch)
2377
2377
2378 def difffn(opts, losedata):
2378 def difffn(opts, losedata):
2379 return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2379 return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
2380 copy, getfilectx, opts, losedata, prefix, relroot)
2380 copy, getfilectx, opts, losedata, prefix, relroot)
2381 if opts.upgrade and not opts.git:
2381 if opts.upgrade and not opts.git:
2382 try:
2382 try:
2383 def losedata(fn):
2383 def losedata(fn):
2384 if not losedatafn or not losedatafn(fn=fn):
2384 if not losedatafn or not losedatafn(fn=fn):
2385 raise GitDiffRequired
2385 raise GitDiffRequired
2386 # Buffer the whole output until we are sure it can be generated
2386 # Buffer the whole output until we are sure it can be generated
2387 return list(difffn(opts.copy(git=False), losedata))
2387 return list(difffn(opts.copy(git=False), losedata))
2388 except GitDiffRequired:
2388 except GitDiffRequired:
2389 return difffn(opts.copy(git=True), None)
2389 return difffn(opts.copy(git=True), None)
2390 else:
2390 else:
2391 return difffn(opts, None)
2391 return difffn(opts, None)
2392
2392
2393 def diffsinglehunk(hunklines):
2393 def diffsinglehunk(hunklines):
2394 """yield tokens for a list of lines in a single hunk"""
2394 """yield tokens for a list of lines in a single hunk"""
2395 for line in hunklines:
2395 for line in hunklines:
2396 # chomp
2396 # chomp
2397 chompline = line.rstrip('\r\n')
2397 chompline = line.rstrip('\r\n')
2398 # highlight tabs and trailing whitespace
2398 # highlight tabs and trailing whitespace
2399 stripline = chompline.rstrip()
2399 stripline = chompline.rstrip()
2400 if line.startswith('-'):
2400 if line.startswith('-'):
2401 label = 'diff.deleted'
2401 label = 'diff.deleted'
2402 elif line.startswith('+'):
2402 elif line.startswith('+'):
2403 label = 'diff.inserted'
2403 label = 'diff.inserted'
2404 else:
2404 else:
2405 raise error.ProgrammingError('unexpected hunk line: %s' % line)
2405 raise error.ProgrammingError('unexpected hunk line: %s' % line)
2406 for token in tabsplitter.findall(stripline):
2406 for token in tabsplitter.findall(stripline):
2407 if token.startswith('\t'):
2407 if token.startswith('\t'):
2408 yield (token, 'diff.tab')
2408 yield (token, 'diff.tab')
2409 else:
2409 else:
2410 yield (token, label)
2410 yield (token, label)
2411
2411
2412 if chompline != stripline:
2412 if chompline != stripline:
2413 yield (chompline[len(stripline):], 'diff.trailingwhitespace')
2413 yield (chompline[len(stripline):], 'diff.trailingwhitespace')
2414 if chompline != line:
2414 if chompline != line:
2415 yield (line[len(chompline):], '')
2415 yield (line[len(chompline):], '')
2416
2416
2417 def diffsinglehunkinline(hunklines):
2417 def diffsinglehunkinline(hunklines):
2418 """yield tokens for a list of lines in a single hunk, with inline colors"""
2418 """yield tokens for a list of lines in a single hunk, with inline colors"""
2419 # prepare deleted, and inserted content
2419 # prepare deleted, and inserted content
2420 a = ''
2420 a = ''
2421 b = ''
2421 b = ''
2422 for line in hunklines:
2422 for line in hunklines:
2423 if line[0:1] == '-':
2423 if line[0:1] == '-':
2424 a += line[1:]
2424 a += line[1:]
2425 elif line[0:1] == '+':
2425 elif line[0:1] == '+':
2426 b += line[1:]
2426 b += line[1:]
2427 else:
2427 else:
2428 raise error.ProgrammingError('unexpected hunk line: %s' % line)
2428 raise error.ProgrammingError('unexpected hunk line: %s' % line)
2429 # fast path: if either side is empty, use diffsinglehunk
2429 # fast path: if either side is empty, use diffsinglehunk
2430 if not a or not b:
2430 if not a or not b:
2431 for t in diffsinglehunk(hunklines):
2431 for t in diffsinglehunk(hunklines):
2432 yield t
2432 yield t
2433 return
2433 return
2434 # re-split the content into words
2434 # re-split the content into words
2435 al = wordsplitter.findall(a)
2435 al = wordsplitter.findall(a)
2436 bl = wordsplitter.findall(b)
2436 bl = wordsplitter.findall(b)
2437 # re-arrange the words to lines since the diff algorithm is line-based
2437 # re-arrange the words to lines since the diff algorithm is line-based
2438 aln = [s if s == '\n' else s + '\n' for s in al]
2438 aln = [s if s == '\n' else s + '\n' for s in al]
2439 bln = [s if s == '\n' else s + '\n' for s in bl]
2439 bln = [s if s == '\n' else s + '\n' for s in bl]
2440 an = ''.join(aln)
2440 an = ''.join(aln)
2441 bn = ''.join(bln)
2441 bn = ''.join(bln)
2442 # run the diff algorithm, prepare atokens and btokens
2442 # run the diff algorithm, prepare atokens and btokens
2443 atokens = []
2443 atokens = []
2444 btokens = []
2444 btokens = []
2445 blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
2445 blocks = mdiff.allblocks(an, bn, lines1=aln, lines2=bln)
2446 for (a1, a2, b1, b2), btype in blocks:
2446 for (a1, a2, b1, b2), btype in blocks:
2447 changed = btype == '!'
2447 changed = btype == '!'
2448 for token in mdiff.splitnewlines(''.join(al[a1:a2])):
2448 for token in mdiff.splitnewlines(''.join(al[a1:a2])):
2449 atokens.append((changed, token))
2449 atokens.append((changed, token))
2450 for token in mdiff.splitnewlines(''.join(bl[b1:b2])):
2450 for token in mdiff.splitnewlines(''.join(bl[b1:b2])):
2451 btokens.append((changed, token))
2451 btokens.append((changed, token))
2452
2452
2453 # yield deleted tokens, then inserted ones
2453 # yield deleted tokens, then inserted ones
2454 for prefix, label, tokens in [('-', 'diff.deleted', atokens),
2454 for prefix, label, tokens in [('-', 'diff.deleted', atokens),
2455 ('+', 'diff.inserted', btokens)]:
2455 ('+', 'diff.inserted', btokens)]:
2456 nextisnewline = True
2456 nextisnewline = True
2457 for changed, token in tokens:
2457 for changed, token in tokens:
2458 if nextisnewline:
2458 if nextisnewline:
2459 yield (prefix, label)
2459 yield (prefix, label)
2460 nextisnewline = False
2460 nextisnewline = False
2461 # special handling line end
2461 # special handling line end
2462 isendofline = token.endswith('\n')
2462 isendofline = token.endswith('\n')
2463 if isendofline:
2463 if isendofline:
2464 chomp = token[:-1] # chomp
2464 chomp = token[:-1] # chomp
2465 if chomp.endswith('\r'):
2465 if chomp.endswith('\r'):
2466 chomp = chomp[:-1]
2466 chomp = chomp[:-1]
2467 endofline = token[len(chomp):]
2467 endofline = token[len(chomp):]
2468 token = chomp.rstrip() # detect spaces at the end
2468 token = chomp.rstrip() # detect spaces at the end
2469 endspaces = chomp[len(token):]
2469 endspaces = chomp[len(token):]
2470 # scan tabs
2470 # scan tabs
2471 for maybetab in tabsplitter.findall(token):
2471 for maybetab in tabsplitter.findall(token):
2472 if b'\t' == maybetab[0:1]:
2472 if b'\t' == maybetab[0:1]:
2473 currentlabel = 'diff.tab'
2473 currentlabel = 'diff.tab'
2474 else:
2474 else:
2475 if changed:
2475 if changed:
2476 currentlabel = label + '.changed'
2476 currentlabel = label + '.changed'
2477 else:
2477 else:
2478 currentlabel = label + '.unchanged'
2478 currentlabel = label + '.unchanged'
2479 yield (maybetab, currentlabel)
2479 yield (maybetab, currentlabel)
2480 if isendofline:
2480 if isendofline:
2481 if endspaces:
2481 if endspaces:
2482 yield (endspaces, 'diff.trailingwhitespace')
2482 yield (endspaces, 'diff.trailingwhitespace')
2483 yield (endofline, '')
2483 yield (endofline, '')
2484 nextisnewline = True
2484 nextisnewline = True
2485
2485
2486 def difflabel(func, *args, **kw):
2486 def difflabel(func, *args, **kw):
2487 '''yields 2-tuples of (output, label) based on the output of func()'''
2487 '''yields 2-tuples of (output, label) based on the output of func()'''
2488 if kw.get(r'opts') and kw[r'opts'].worddiff:
2488 if kw.get(r'opts') and kw[r'opts'].worddiff:
2489 dodiffhunk = diffsinglehunkinline
2489 dodiffhunk = diffsinglehunkinline
2490 else:
2490 else:
2491 dodiffhunk = diffsinglehunk
2491 dodiffhunk = diffsinglehunk
2492 headprefixes = [('diff', 'diff.diffline'),
2492 headprefixes = [('diff', 'diff.diffline'),
2493 ('copy', 'diff.extended'),
2493 ('copy', 'diff.extended'),
2494 ('rename', 'diff.extended'),
2494 ('rename', 'diff.extended'),
2495 ('old', 'diff.extended'),
2495 ('old', 'diff.extended'),
2496 ('new', 'diff.extended'),
2496 ('new', 'diff.extended'),
2497 ('deleted', 'diff.extended'),
2497 ('deleted', 'diff.extended'),
2498 ('index', 'diff.extended'),
2498 ('index', 'diff.extended'),
2499 ('similarity', 'diff.extended'),
2499 ('similarity', 'diff.extended'),
2500 ('---', 'diff.file_a'),
2500 ('---', 'diff.file_a'),
2501 ('+++', 'diff.file_b')]
2501 ('+++', 'diff.file_b')]
2502 textprefixes = [('@', 'diff.hunk'),
2502 textprefixes = [('@', 'diff.hunk'),
2503 # - and + are handled by diffsinglehunk
2503 # - and + are handled by diffsinglehunk
2504 ]
2504 ]
2505 head = False
2505 head = False
2506
2506
2507 # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
2507 # buffers a hunk, i.e. adjacent "-", "+" lines without other changes.
2508 hunkbuffer = []
2508 hunkbuffer = []
2509 def consumehunkbuffer():
2509 def consumehunkbuffer():
2510 if hunkbuffer:
2510 if hunkbuffer:
2511 for token in dodiffhunk(hunkbuffer):
2511 for token in dodiffhunk(hunkbuffer):
2512 yield token
2512 yield token
2513 hunkbuffer[:] = []
2513 hunkbuffer[:] = []
2514
2514
2515 for chunk in func(*args, **kw):
2515 for chunk in func(*args, **kw):
2516 lines = chunk.split('\n')
2516 lines = chunk.split('\n')
2517 linecount = len(lines)
2517 linecount = len(lines)
2518 for i, line in enumerate(lines):
2518 for i, line in enumerate(lines):
2519 if head:
2519 if head:
2520 if line.startswith('@'):
2520 if line.startswith('@'):
2521 head = False
2521 head = False
2522 else:
2522 else:
2523 if line and not line.startswith((' ', '+', '-', '@', '\\')):
2523 if line and not line.startswith((' ', '+', '-', '@', '\\')):
2524 head = True
2524 head = True
2525 diffline = False
2525 diffline = False
2526 if not head and line and line.startswith(('+', '-')):
2526 if not head and line and line.startswith(('+', '-')):
2527 diffline = True
2527 diffline = True
2528
2528
2529 prefixes = textprefixes
2529 prefixes = textprefixes
2530 if head:
2530 if head:
2531 prefixes = headprefixes
2531 prefixes = headprefixes
2532 if diffline:
2532 if diffline:
2533 # buffered
2533 # buffered
2534 bufferedline = line
2534 bufferedline = line
2535 if i + 1 < linecount:
2535 if i + 1 < linecount:
2536 bufferedline += "\n"
2536 bufferedline += "\n"
2537 hunkbuffer.append(bufferedline)
2537 hunkbuffer.append(bufferedline)
2538 else:
2538 else:
2539 # unbuffered
2539 # unbuffered
2540 for token in consumehunkbuffer():
2540 for token in consumehunkbuffer():
2541 yield token
2541 yield token
2542 stripline = line.rstrip()
2542 stripline = line.rstrip()
2543 for prefix, label in prefixes:
2543 for prefix, label in prefixes:
2544 if stripline.startswith(prefix):
2544 if stripline.startswith(prefix):
2545 yield (stripline, label)
2545 yield (stripline, label)
2546 if line != stripline:
2546 if line != stripline:
2547 yield (line[len(stripline):],
2547 yield (line[len(stripline):],
2548 'diff.trailingwhitespace')
2548 'diff.trailingwhitespace')
2549 break
2549 break
2550 else:
2550 else:
2551 yield (line, '')
2551 yield (line, '')
2552 if i + 1 < linecount:
2552 if i + 1 < linecount:
2553 yield ('\n', '')
2553 yield ('\n', '')
2554 for token in consumehunkbuffer():
2554 for token in consumehunkbuffer():
2555 yield token
2555 yield token
2556
2556
2557 def diffui(*args, **kw):
2557 def diffui(*args, **kw):
2558 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2558 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
2559 return difflabel(diff, *args, **kw)
2559 return difflabel(diff, *args, **kw)
2560
2560
2561 def _filepairs(modified, added, removed, copy, opts):
2561 def _filepairs(modified, added, removed, copy, opts):
2562 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2562 '''generates tuples (f1, f2, copyop), where f1 is the name of the file
2563 before and f2 is the the name after. For added files, f1 will be None,
2563 before and f2 is the the name after. For added files, f1 will be None,
2564 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2564 and for removed files, f2 will be None. copyop may be set to None, 'copy'
2565 or 'rename' (the latter two only if opts.git is set).'''
2565 or 'rename' (the latter two only if opts.git is set).'''
2566 gone = set()
2566 gone = set()
2567
2567
2568 copyto = dict([(v, k) for k, v in copy.items()])
2568 copyto = dict([(v, k) for k, v in copy.items()])
2569
2569
2570 addedset, removedset = set(added), set(removed)
2570 addedset, removedset = set(added), set(removed)
2571
2571
2572 for f in sorted(modified + added + removed):
2572 for f in sorted(modified + added + removed):
2573 copyop = None
2573 copyop = None
2574 f1, f2 = f, f
2574 f1, f2 = f, f
2575 if f in addedset:
2575 if f in addedset:
2576 f1 = None
2576 f1 = None
2577 if f in copy:
2577 if f in copy:
2578 if opts.git:
2578 if opts.git:
2579 f1 = copy[f]
2579 f1 = copy[f]
2580 if f1 in removedset and f1 not in gone:
2580 if f1 in removedset and f1 not in gone:
2581 copyop = 'rename'
2581 copyop = 'rename'
2582 gone.add(f1)
2582 gone.add(f1)
2583 else:
2583 else:
2584 copyop = 'copy'
2584 copyop = 'copy'
2585 elif f in removedset:
2585 elif f in removedset:
2586 f2 = None
2586 f2 = None
2587 if opts.git:
2587 if opts.git:
2588 # have we already reported a copy above?
2588 # have we already reported a copy above?
2589 if (f in copyto and copyto[f] in addedset
2589 if (f in copyto and copyto[f] in addedset
2590 and copy[copyto[f]] == f):
2590 and copy[copyto[f]] == f):
2591 continue
2591 continue
2592 yield f1, f2, copyop
2592 yield f1, f2, copyop
2593
2593
def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
            copy, getfilectx, opts, losedatafn, prefix, relroot):
    '''given input data, generate a diff and yield it in blocks

    Yields one (fctx1, fctx2, header, hunks) tuple per changed file: the
    old and new file contexts (None on the added/removed side), the list
    of header lines for that file, and the hunk data from mdiff.

    If generating a diff would lose data like flags or binary data and
    losedatafn is not None, it will be called.

    relroot is removed and prefix is added to every path in the diff output.

    If relroot is not empty, this function expects every path in modified,
    added, removed and copy to start with it.'''

    def gitindex(text):
        # git-style blob id: SHA-1 over "blob <size>\0<content>", used
        # for the "index ..." header lines
        if not text:
            text = ""
        l = len(text)
        s = hashlib.sha1('blob %d\0' % l)
        s.update(text)
        return hex(s.digest())

    if opts.noprefix:
        aprefix = bprefix = ''
    else:
        aprefix = 'a/'
        bprefix = 'b/'

    def diffline(f, revs):
        # traditional (non-git) header line: "diff -r REV1 -r REV2 file"
        revinfo = ' '.join(["-r %s" % rev for rev in revs])
        return 'diff %s %s' % (revinfo, f)

    def isempty(fctx):
        # a missing file context counts as empty for the data-loss
        # checks below
        return fctx is None or fctx.size() == 0

    date1 = dateutil.datestr(ctx1.date())
    date2 = dateutil.datestr(ctx2.date())

    # hg flag character -> git file mode for git-style headers
    gitmode = {'l': '120000', 'x': '100755', '': '100644'}

    if relroot != '' and (repo.ui.configbool('devel', 'all-warnings')
                          or repo.ui.configbool('devel', 'check-relroot')):
        # developer sanity check: callers promised (see docstring) that
        # every path starts with relroot
        for f in modified + added + removed + list(copy) + list(copy.values()):
            if f is not None and not f.startswith(relroot):
                raise AssertionError(
                    "file %s doesn't start with relroot %s" % (f, relroot))

    for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts):
        content1 = None
        content2 = None
        fctx1 = None
        fctx2 = None
        flag1 = None
        flag2 = None
        if f1:
            fctx1 = getfilectx(f1, ctx1)
            if opts.git or losedatafn:
                flag1 = ctx1.flags(f1)
        if f2:
            fctx2 = getfilectx(f2, ctx2)
            if opts.git or losedatafn:
                flag2 = ctx2.flags(f2)
        # if binary is True, output "summary" or "base85", but not "text diff"
        if opts.text:
            binary = False
        else:
            binary = any(f.isbinary() for f in [fctx1, fctx2] if f is not None)

        if losedatafn and not opts.git:
            # a plain (non-git) diff cannot represent these changes; give
            # the caller a chance to veto or warn
            if (binary or
                # copy/rename
                f2 in copy or
                # empty file creation
                (not f1 and isempty(fctx2)) or
                # empty file deletion
                (isempty(fctx1) and not f2) or
                # create with flags
                (not f1 and flag2) or
                # change flags
                (f1 and f2 and flag1 != flag2)):
                losedatafn(f2 or f1)

        # for added/removed files, use the surviving name on both sides
        path1 = f1 or f2
        path2 = f2 or f1
        path1 = posixpath.join(prefix, path1[len(relroot):])
        path2 = posixpath.join(prefix, path2[len(relroot):])
        header = []
        if opts.git:
            header.append('diff --git %s%s %s%s' %
                          (aprefix, path1, bprefix, path2))
            if not f1: # added
                header.append('new file mode %s' % gitmode[flag2])
            elif not f2: # removed
                header.append('deleted file mode %s' % gitmode[flag1])
            else: # modified/copied/renamed
                mode1, mode2 = gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    if opts.showsimilarity:
                        sim = similar.score(ctx1[path1], ctx2[path2]) * 100
                        header.append('similarity index %d%%' % sim)
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            header.append(diffline(path1, revs))

        #  fctx.is  | diffopts                | what to   | is fctx.data()
        #  binary() | text nobinary git index | output?   | outputted?
        # ------------------------------------|----------------------------
        #  yes      | no   no       no  *     | summary   | no
        #  yes      | no   no       yes *     | base85    | yes
        #  yes      | no   yes      no  *     | summary   | no
        #  yes      | no   yes      yes 0     | summary   | no
        #  yes      | no   yes      yes >0    | summary   | semi [1]
        #  yes      | yes  *        *   *     | text diff | yes
        #  no       | *    *        *   *     | text diff | yes
        # [1]: hash(fctx.data()) is outputted. so fctx.data() cannot be faked
        if binary and (not opts.git or (opts.git and opts.nobinary and not
                                        opts.index)):
            # fast path: no binary content will be displayed, content1 and
            # content2 are only used for equivalent test. cmp() could have a
            # fast path.
            if fctx1 is not None:
                content1 = b'\0'
            if fctx2 is not None:
                if fctx1 is not None and not fctx1.cmp(fctx2):
                    content2 = b'\0' # not different
                else:
                    content2 = b'\0\0'
        else:
            # normal path: load contents
            if fctx1 is not None:
                content1 = fctx1.data()
            if fctx2 is not None:
                content2 = fctx2.data()

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
            # hunks is built even when text is empty (this sits outside
            # the 'if text' guard)
            hunks = (None, [text]),
        else:
            if opts.git and opts.index > 0:
                flag = flag1
                if flag is None:
                    flag = flag2
                header.append('index %s..%s %s' %
                              (gitindex(content1)[0:opts.index],
                               gitindex(content2)[0:opts.index],
                               gitmode[flag]))

            uheaders, hunks = mdiff.unidiff(content1, date1,
                                            content2, date2,
                                            path1, path2,
                                            binary=binary, opts=opts)
            header.extend(uheaders)
        yield fctx1, fctx2, header, hunks
2752
2752
def diffstatsum(stats):
    '''summarize (filename, adds, removes, isbinary) tuples

    Returns (maxfile, maxtotal, addtotal, removetotal, binary): the widest
    filename (in display columns), the largest per-file change count, the
    grand totals of added and removed lines, and whether any entry was
    binary.
    '''
    widestname = 0
    widesttotal = 0
    addtotal = 0
    removetotal = 0
    anybinary = False
    for name, nadds, nremoves, isbinary in stats:
        namewidth = encoding.colwidth(name)
        if namewidth > widestname:
            widestname = namewidth
        changecount = nadds + nremoves
        if changecount > widesttotal:
            widesttotal = changecount
        addtotal += nadds
        removetotal += nremoves
        anybinary = anybinary or isbinary

    return widestname, widesttotal, addtotal, removetotal, anybinary
2763
2763
def diffstatdata(lines):
    '''parse diff text and return (filename, adds, removes, isbinary) tuples'''
    diffre = re.compile(br'^diff .*-r [a-z0-9]+\s(.*)$')

    stats = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def flush():
        # record the file currently being accumulated, if any
        if filename:
            stats.append((filename, adds, removes, isbinary))

    # inheader tracks whether we are still in the per-file header, so
    # that lines beginning with '--' or '++' there are not miscounted
    inheader = False

    for line in lines:
        if line.startswith('diff'):
            flush()
            # a new file diff starts here: reset all counters
            inheader = True
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ... filename"
                filename = diffre.search(line).group(1)
        elif line.startswith('@@'):
            inheader = False
        elif not inheader and line.startswith('+'):
            adds += 1
        elif not inheader and line.startswith('-'):
            removes += 1
        elif (line.startswith('GIT binary patch')
              or line.startswith('Binary file')):
            isbinary = True
        elif line.startswith('rename from'):
            filename = line[12:]
        elif line.startswith('rename to'):
            filename += ' => %s' % line[10:]
    flush()
    return stats
2806
2806
def diffstat(lines, width=80):
    '''render diff text as a diffstat summary string, at most width columns'''
    stats = diffstatdata(lines)
    maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats)

    # width of the change-count column; 'Bin' needs at least 3 columns
    countwidth = len(str(maxtotal))
    if hasbinary:
        countwidth = max(countwidth, 3)
    # 6 columns go to fixed separators; never squeeze below 10
    graphwidth = max(width - countwidth - maxname - 6, 10)

    def scale(i):
        if maxtotal <= graphwidth:
            return i
        # If diffstat runs out of room it doesn't print anything,
        # which isn't very useful, so always print at least one + or -
        # if there were at least some changes.
        return max(i * graphwidth // maxtotal, int(bool(i)))

    rows = []
    for name, nadds, nremoves, isbinary in stats:
        count = 'Bin' if isbinary else '%d' % (nadds + nremoves)
        pluses = '+' * scale(nadds)
        minuses = '-' * scale(nremoves)
        rows.append(' %s%s | %*s %s%s\n' %
                    (name, ' ' * (maxname - encoding.colwidth(name)),
                     countwidth, count, pluses, minuses))

    if stats:
        rows.append(_(' %d files changed, %d insertions(+), '
                      '%d deletions(-)\n')
                    % (len(stats), totaladds, totalremoves))

    return ''.join(rows)
2844
2844
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''

    for line in diffstat(*args, **kw).splitlines():
        if not line or line[-1] not in '+-':
            # summary/non-graph rows pass through unlabelled
            yield (line, '')
        else:
            # per-file rows end in a +/- histogram we can colorize
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            for pattern, label in ((br'\++', 'diffstat.inserted'),
                                   (br'-+', 'diffstat.deleted')):
                m = re.search(pattern, graph)
                if m:
                    yield (m.group(0), label)
        yield ('\n', '')
General Comments 0
You need to be logged in to leave comments. Login now